// Copyright 2015-2026 The Khronos Group Inc. // // SPDX-License-Identifier: Apache-2.0 OR MIT // // This header is generated from the Khronos Vulkan XML API Registry. #ifndef VULKAN_STRUCTS_HPP #define VULKAN_STRUCTS_HPP // include-what-you-use: make sure, vulkan.hpp is used by code-completers // IWYU pragma: private, include "vulkan/vulkan.hpp" #if !defined( VULKAN_HPP_CXX_MODULE ) # include // strcmp #endif namespace VULKAN_HPP_NAMESPACE { //=============== //=== STRUCTS === //=============== // wrapper struct for struct VkAabbPositionsKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAabbPositionsKHR.html struct AabbPositionsKHR { using NativeType = VkAabbPositionsKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT : minX{ minX_ } , minY{ minY_ } , minZ{ minZ_ } , maxX{ maxX_ } , maxY{ maxY_ } , maxZ{ maxZ_ } { } VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AabbPositionsKHR( *reinterpret_cast( &rhs ) ) {} AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinX( float minX_ ) & VULKAN_HPP_NOEXCEPT { minX = minX_; return *this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMinX( float minX_ ) && VULKAN_HPP_NOEXCEPT { minX = minX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinY( float minY_ ) & VULKAN_HPP_NOEXCEPT { minY = minY_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMinY( float minY_ ) && VULKAN_HPP_NOEXCEPT { minY = minY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMinZ( float minZ_ ) & VULKAN_HPP_NOEXCEPT { minZ = minZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMinZ( float minZ_ ) && VULKAN_HPP_NOEXCEPT { minZ = minZ_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxX( float maxX_ ) & VULKAN_HPP_NOEXCEPT { maxX = maxX_; return *this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMaxX( float maxX_ ) && VULKAN_HPP_NOEXCEPT { maxX = maxX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxY( float maxY_ ) & VULKAN_HPP_NOEXCEPT { maxY = maxY_; return *this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMaxY( float maxY_ ) && VULKAN_HPP_NOEXCEPT { maxY = maxY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & setMaxZ( float maxZ_ ) & VULKAN_HPP_NOEXCEPT { maxZ = maxZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR && setMaxZ( float maxZ_ ) && VULKAN_HPP_NOEXCEPT { maxZ = maxZ_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAabbPositionsKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAabbPositionsKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAabbPositionsKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( minX, minY, minZ, maxX, maxY, maxZ ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AabbPositionsKHR const & ) const = default; #else bool operator==( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( minX == rhs.minX ) && ( minY == rhs.minY ) && ( minZ == rhs.minZ ) && ( maxX == rhs.maxX ) && ( maxY == rhs.maxY ) && ( maxZ == rhs.maxZ ); # endif } bool operator!=( AabbPositionsKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float minX = {}; float minY = {}; float minZ = {}; float maxX = {}; float maxY = {}; float maxZ = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AabbPositionsKHR; }; #endif using AabbPositionsNV = AabbPositionsKHR; union DeviceOrHostAddressConstKHR { using NativeType = VkDeviceOrHostAddressConstKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR & setHostAddress( const void * hostAddress_ ) & VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstKHR && setHostAddress( const void * hostAddress_ ) && VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceOrHostAddressConstKHR const &() const { return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressConstKHR &() { return *reinterpret_cast( 
this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS DeviceAddress deviceAddress; const void * hostAddress; #else VkDeviceAddress deviceAddress; const void * hostAddress; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceOrHostAddressConstKHR; }; #endif // wrapper struct for struct VkAccelerationStructureGeometryTrianglesDataKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryTrianglesDataKHR.html struct AccelerationStructureGeometryTrianglesDataKHR { using NativeType = VkAccelerationStructureGeometryTrianglesDataKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( Format vertexFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR vertexData_ = {}, DeviceSize vertexStride_ = {}, uint32_t maxVertex_ = {}, IndexType indexType_ = IndexType::eUint16, DeviceOrHostAddressConstKHR indexData_ = {}, DeviceOrHostAddressConstKHR transformData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexFormat{ vertexFormat_ } , vertexData{ vertexData_ } , vertexStride{ vertexStride_ } , maxVertex{ maxVertex_ } , indexType{ indexType_ } , indexData{ indexData_ } , transformData{ transformData_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryTrianglesDataKHR & operator=( 
AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( Format vertexFormat_ ) & VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setVertexFormat( Format vertexFormat_ ) && VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) & VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) && VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( DeviceSize vertexStride_ ) & VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setVertexStride( DeviceSize vertexStride_ ) && VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & 
setMaxVertex( uint32_t maxVertex_ ) & VULKAN_HPP_NOEXCEPT { maxVertex = maxVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setMaxVertex( uint32_t maxVertex_ ) && VULKAN_HPP_NOEXCEPT { maxVertex = maxVertex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) & VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) && VULKAN_HPP_NOEXCEPT { indexData = indexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR & setTransformData( DeviceOrHostAddressConstKHR const & transformData_ ) & VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryTrianglesDataKHR && setTransformData( DeviceOrHostAddressConstKHR const & transformData_ ) && VULKAN_HPP_NOEXCEPT { transformData = transformData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryTrianglesDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryTrianglesDataKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryTrianglesDataKHR *() VULKAN_HPP_NOEXCEPT 
{ return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, maxVertex, indexType, indexData, transformData ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR; const void * pNext = {}; Format vertexFormat = Format::eUndefined; DeviceOrHostAddressConstKHR vertexData = {}; DeviceSize vertexStride = {}; uint32_t maxVertex = {}; IndexType indexType = IndexType::eUint16; DeviceOrHostAddressConstKHR indexData = {}; DeviceOrHostAddressConstKHR transformData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryTrianglesDataKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryTrianglesDataKHR; }; // wrapper struct for struct VkAccelerationStructureGeometryAabbsDataKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryAabbsDataKHR.html struct AccelerationStructureGeometryAabbsDataKHR { using NativeType = VkAccelerationStructureGeometryAabbsDataKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( DeviceOrHostAddressConstKHR data_ = {}, DeviceSize stride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , data{ data_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryAabbsDataKHR( 
*reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setData( DeviceOrHostAddressConstKHR const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR && setData( DeviceOrHostAddressConstKHR const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR & setStride( DeviceSize stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR && setStride( DeviceSize stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryAabbsDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryAabbsDataKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryAabbsDataKHR *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, data, stride ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR; const void * pNext = {}; DeviceOrHostAddressConstKHR data = {}; DeviceSize stride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryAabbsDataKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryAabbsDataKHR; }; // wrapper struct for struct VkAccelerationStructureGeometryInstancesDataKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryInstancesDataKHR.html struct AccelerationStructureGeometryInstancesDataKHR { using NativeType = VkAccelerationStructureGeometryInstancesDataKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( Bool32 arrayOfPointers_ = {}, DeviceOrHostAddressConstKHR data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , arrayOfPointers{ arrayOfPointers_ } , data{ data_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( Bool32 arrayOfPointers_ ) & VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR && setArrayOfPointers( Bool32 arrayOfPointers_ ) && VULKAN_HPP_NOEXCEPT { arrayOfPointers = arrayOfPointers_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR & setData( DeviceOrHostAddressConstKHR const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR && setData( DeviceOrHostAddressConstKHR const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryInstancesDataKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryInstancesDataKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryInstancesDataKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const 
VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, arrayOfPointers, data ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR; const void * pNext = {}; Bool32 arrayOfPointers = {}; DeviceOrHostAddressConstKHR data = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryInstancesDataKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryInstancesDataKHR; }; union AccelerationStructureGeometryDataKHR { using NativeType = VkAccelerationStructureGeometryDataKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} ) : triangles( triangles_ ) {} VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryAabbsDataKHR aabbs_ ) : aabbs( aabbs_ ) {} VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR( AccelerationStructureGeometryInstancesDataKHR instances_ ) : instances( instances_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setTriangles( AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) & VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR && setTriangles( AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) && VULKAN_HPP_NOEXCEPT { triangles = triangles_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setAabbs( AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) & VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR && setAabbs( AccelerationStructureGeometryAabbsDataKHR const & 
aabbs_ ) && VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR & setInstances( AccelerationStructureGeometryInstancesDataKHR const & instances_ ) & VULKAN_HPP_NOEXCEPT { instances = instances_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryDataKHR && setInstances( AccelerationStructureGeometryInstancesDataKHR const & instances_ ) && VULKAN_HPP_NOEXCEPT { instances = instances_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryDataKHR const &() const { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryDataKHR &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS AccelerationStructureGeometryTrianglesDataKHR triangles; AccelerationStructureGeometryAabbsDataKHR aabbs; AccelerationStructureGeometryInstancesDataKHR instances; #else VkAccelerationStructureGeometryTrianglesDataKHR triangles; VkAccelerationStructureGeometryAabbsDataKHR aabbs; VkAccelerationStructureGeometryInstancesDataKHR instances; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryDataKHR; }; #endif // wrapper struct for struct VkAccelerationStructureGeometryKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryKHR.html struct AccelerationStructureGeometryKHR { using NativeType = VkAccelerationStructureGeometryKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( GeometryTypeKHR geometryType_ = GeometryTypeKHR::eTriangles, AccelerationStructureGeometryDataKHR geometry_ = {}, GeometryFlagsKHR 
flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , geometryType{ geometryType_ } , geometry{ geometry_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometryType( GeometryTypeKHR geometryType_ ) & VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR && setGeometryType( GeometryTypeKHR geometryType_ ) && VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR & setGeometry( AccelerationStructureGeometryDataKHR const & geometry_ ) & VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR && setGeometry( AccelerationStructureGeometryDataKHR const & geometry_ ) && VULKAN_HPP_NOEXCEPT { geometry = geometry_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureGeometryKHR & setFlags( GeometryFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryKHR && setFlags( GeometryFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, geometryType, geometry, flags ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryKHR; const void * pNext = {}; GeometryTypeKHR geometryType = GeometryTypeKHR::eTriangles; AccelerationStructureGeometryDataKHR geometry = {}; GeometryFlagsKHR flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryKHR; }; union DeviceOrHostAddressKHR { using NativeType = VkDeviceOrHostAddressKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR( void * hostAddress_ ) : hostAddress( hostAddress_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) & 
VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR & setHostAddress( void * hostAddress_ ) & VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressKHR && setHostAddress( void * hostAddress_ ) && VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceOrHostAddressKHR const &() const { return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressKHR &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS DeviceAddress deviceAddress; void * hostAddress; #else VkDeviceAddress deviceAddress; void * hostAddress; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceOrHostAddressKHR; }; #endif // wrapper struct for struct VkAccelerationStructureBuildGeometryInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildGeometryInfoKHR.html struct AccelerationStructureBuildGeometryInfoKHR { using NativeType = VkAccelerationStructureBuildGeometryInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureTypeKHR type_ = AccelerationStructureTypeKHR::eTopLevel, BuildAccelerationStructureFlagsKHR flags_ = {}, BuildAccelerationStructureModeKHR mode_ = BuildAccelerationStructureModeKHR::eBuild, AccelerationStructureKHR srcAccelerationStructure_ = {}, 
// Continuation of the AccelerationStructureBuildGeometryInfoKHR value constructor; the struct's
// opening and the first constructor parameters lie above this chunk.
// NOTE(review): template argument lists appear to have been stripped from this text
// (e.g. reinterpret_cast( &rhs ), ArrayProxyNoTemporaries const &) — confirm against the
// generated vulkan_structs.hpp.
AccelerationStructureKHR dstAccelerationStructure_ = {}, uint32_t geometryCount_ = {}, const AccelerationStructureGeometryKHR * pGeometries_ = {}, const AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , flags{ flags_ } , mode{ mode_ } , srcAccelerationStructure{ srcAccelerationStructure_ } , dstAccelerationStructure{ dstAccelerationStructure_ } , geometryCount{ geometryCount_ } , pGeometries{ pGeometries_ } , ppGeometries{ ppGeometries_ } , scratchData{ scratchData_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureTypeKHR type_, BuildAccelerationStructureFlagsKHR flags_, BuildAccelerationStructureModeKHR mode_, AccelerationStructureKHR srcAccelerationStructure_, AccelerationStructureKHR dstAccelerationStructure_, ArrayProxyNoTemporaries const & geometries_, ArrayProxyNoTemporaries const & pGeometries_ = {}, DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , type( type_ ) , flags( flags_ ) , mode( mode_ ) , srcAccelerationStructure( srcAccelerationStructure_ ) , dstAccelerationStructure( dstAccelerationStructure_ ) , geometryCount( static_cast( !geometries_.empty() ?
// Enhanced-mode constructor: geometryCount is taken from whichever of geometries_ /
// pGeometries_ is non-empty; the two array proxies are mutually exclusive — asserted in
// no-exceptions builds, otherwise a LogicError is thrown when both are non-empty.
geometries_.size() : pGeometries_.size() ) ) , pGeometries( geometries_.data() ) , ppGeometries( pGeometries_.data() ) , scratchData( scratchData_ ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) <= 1 ); # else if ( 1 < ( !geometries_.empty() + !pGeometries_.empty() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: 1 < ( !geometries_.empty() + !pGeometries_.empty() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setType( AccelerationStructureTypeKHR type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setType( AccelerationStructureTypeKHR type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setFlags(
// Chainable setters continue: each field has an lvalue (&) overload returning *this and an
// rvalue (&&) overload returning std::move( *this ) so chains work on temporaries.
BuildAccelerationStructureFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setMode( BuildAccelerationStructureModeKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setMode( BuildAccelerationStructureModeKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( AccelerationStructureKHR srcAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setSrcAccelerationStructure( AccelerationStructureKHR srcAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( AccelerationStructureKHR dstAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setDstAccelerationStructure( AccelerationStructureKHR dstAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { dstAccelerationStructure = dstAccelerationStructure_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) & VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setGeometryCount( uint32_t geometryCount_ ) && VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPGeometries( const AccelerationStructureGeometryKHR *
// Enhanced-mode array-proxy setters (setGeometries / setPGeometries) also update
// geometryCount from the proxy's size; conversion operators to the C type follow.
pGeometries_ ) & VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setPGeometries( const AccelerationStructureGeometryKHR * pGeometries_ ) && VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureBuildGeometryInfoKHR & setGeometries( ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast( geometries_.size() ); pGeometries = geometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const AccelerationStructureGeometryKHR * const * ppGeometries_ ) & VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setPpGeometries( const AccelerationStructureGeometryKHR * const * ppGeometries_ ) && VULKAN_HPP_NOEXCEPT { ppGeometries = ppGeometries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureBuildGeometryInfoKHR & setPGeometries( ArrayProxyNoTemporaries const & pGeometries_ ) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast( pGeometries_.size() ); ppGeometries = pGeometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR & setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) & VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildGeometryInfoKHR && setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) && VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureBuildGeometryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator
VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildGeometryInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureBuildGeometryInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, flags, mode, srcAccelerationStructure, dstAccelerationStructure, geometryCount, pGeometries, ppGeometries, scratchData ); } #endif public: StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR; const void * pNext = {}; AccelerationStructureTypeKHR type = AccelerationStructureTypeKHR::eTopLevel; BuildAccelerationStructureFlagsKHR flags = {}; BuildAccelerationStructureModeKHR mode = BuildAccelerationStructureModeKHR::eBuild; AccelerationStructureKHR srcAccelerationStructure = {}; AccelerationStructureKHR dstAccelerationStructure = {}; uint32_t geometryCount = {}; const AccelerationStructureGeometryKHR * pGeometries = {}; const AccelerationStructureGeometryKHR * const * ppGeometries = {}; DeviceOrHostAddressKHR scratchData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureBuildGeometryInfoKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureBuildGeometryInfoKHR; }; // wrapper struct for struct VkAccelerationStructureBuildRangeInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildRangeInfoKHR.html struct AccelerationStructureBuildRangeInfoKHR { using NativeType = VkAccelerationStructureBuildRangeInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t 
transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT : primitiveCount{ primitiveCount_ } , primitiveOffset{ primitiveOffset_ } , firstVertex{ firstVertex_ } , transformOffset{ transformOffset_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) & VULKAN_HPP_NOEXCEPT { primitiveCount = primitiveCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR && setPrimitiveCount( uint32_t primitiveCount_ ) && VULKAN_HPP_NOEXCEPT { primitiveCount = primitiveCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) & VULKAN_HPP_NOEXCEPT { primitiveOffset = primitiveOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR && setPrimitiveOffset( uint32_t primitiveOffset_ ) && VULKAN_HPP_NOEXCEPT { primitiveOffset = primitiveOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) & VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR && setFirstVertex( 
uint32_t firstVertex_ ) && VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) & VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR && setTransformOffset( uint32_t transformOffset_ ) && VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureBuildRangeInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildRangeInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureBuildRangeInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( primitiveCount, primitiveOffset, firstVertex, transformOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureBuildRangeInfoKHR const & ) const = default; #else bool operator==( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( primitiveCount == rhs.primitiveCount ) && ( primitiveOffset == rhs.primitiveOffset ) && ( firstVertex == rhs.firstVertex ) && ( transformOffset == rhs.transformOffset ); # endif } bool operator!=( AccelerationStructureBuildRangeInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t primitiveCount = {}; uint32_t primitiveOffset = {}; uint32_t firstVertex = {}; uint32_t transformOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
// Tail of the C++20 CppType mapping for AccelerationStructureBuildRangeInfoKHR, then the
// wrapper for VkAccelerationStructureBuildSizesInfoKHR: sType/pNext plus three DeviceSize
// results (accelerationStructureSize, updateScratchSize, buildScratchSize).  No setters
// block is present for this struct in the visible text.
// NOTE(review): template argument lists (reinterpret_cast<...>, CppType<...>, std::tuple<...>)
// appear stripped throughout — confirm against the generated vulkan_structs.hpp.
<> struct CppType { using Type = AccelerationStructureBuildRangeInfoKHR; }; #endif // wrapper struct for struct VkAccelerationStructureBuildSizesInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureBuildSizesInfoKHR.html struct AccelerationStructureBuildSizesInfoKHR { using NativeType = VkAccelerationStructureBuildSizesInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( DeviceSize accelerationStructureSize_ = {}, DeviceSize updateScratchSize_ = {}, DeviceSize buildScratchSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , accelerationStructureSize{ accelerationStructureSize_ } , updateScratchSize{ updateScratchSize_ } , buildScratchSize{ buildScratchSize_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAccelerationStructureBuildSizesInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureBuildSizesInfoKHR const *()
// Remainder of AccelerationStructureBuildSizesInfoKHR (pointer conversions, reflect,
// comparisons, members, CppType mappings), then the header of
// AccelerationStructureCaptureDescriptorDataInfoEXT (NativeType / allowDuplicate /
// structureType constants).
const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureBuildSizesInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, accelerationStructureSize, updateScratchSize, buildScratchSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureBuildSizesInfoKHR const & ) const = default; #else bool operator==( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructureSize == rhs.accelerationStructureSize ) && ( updateScratchSize == rhs.updateScratchSize ) && ( buildScratchSize == rhs.buildScratchSize ); # endif } bool operator!=( AccelerationStructureBuildSizesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR; void * pNext = {}; DeviceSize accelerationStructureSize = {}; DeviceSize updateScratchSize = {}; DeviceSize buildScratchSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureBuildSizesInfoKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureBuildSizesInfoKHR; }; // wrapper struct for struct VkAccelerationStructureCaptureDescriptorDataInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCaptureDescriptorDataInfoEXT.html struct AccelerationStructureCaptureDescriptorDataInfoEXT { using NativeType = VkAccelerationStructureCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT;
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureKHR accelerationStructure_ = {}, AccelerationStructureNV accelerationStructureNV_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , accelerationStructure{ accelerationStructure_ } , accelerationStructureNV{ accelerationStructureNV_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCaptureDescriptorDataInfoEXT( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureCaptureDescriptorDataInfoEXT & operator=( VkAccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT && 
setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT & setAccelerationStructureNV( AccelerationStructureNV accelerationStructureNV_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureNV = accelerationStructureNV_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCaptureDescriptorDataInfoEXT && setAccelerationStructureNV( AccelerationStructureNV accelerationStructureNV_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureNV = accelerationStructureNV_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, accelerationStructure, accelerationStructureNV ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureCaptureDescriptorDataInfoEXT const & ) const = default; #else bool operator==( AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( accelerationStructureNV == rhs.accelerationStructureNV ); # endif } bool operator!=( 
AccelerationStructureCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureCaptureDescriptorDataInfoEXT; const void * pNext = {}; AccelerationStructureKHR accelerationStructure = {}; AccelerationStructureNV accelerationStructureNV = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureCaptureDescriptorDataInfoEXT; }; #endif template <> struct CppType { using Type = AccelerationStructureCaptureDescriptorDataInfoEXT; }; // wrapper struct for struct VkAccelerationStructureCreateInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoKHR.html struct AccelerationStructureCreateInfoKHR { using NativeType = VkAccelerationStructureCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateFlagsKHR createFlags_ = {}, Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, AccelerationStructureTypeKHR type_ = AccelerationStructureTypeKHR::eTopLevel, DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , createFlags{ createFlags_ } , buffer{ buffer_ } , offset{ offset_ } , size{ size_ } , type{ type_ } , deviceAddress{ deviceAddress_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureCreateInfoKHR & operator=( 
AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setCreateFlags( AccelerationStructureCreateFlagsKHR createFlags_ ) & VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setCreateFlags( AccelerationStructureCreateFlagsKHR createFlags_ ) && VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setType( AccelerationStructureTypeKHR type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setType( AccelerationStructureTypeKHR type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureCreateInfoKHR const & ) const = default; #else bool operator==( AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); # endif } bool operator!=( 
AccelerationStructureCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR; const void * pNext = {}; AccelerationStructureCreateFlagsKHR createFlags = {}; Buffer buffer = {}; DeviceSize offset = {}; DeviceSize size = {}; AccelerationStructureTypeKHR type = AccelerationStructureTypeKHR::eTopLevel; DeviceAddress deviceAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureCreateInfoKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureCreateInfoKHR; }; // wrapper struct for struct VkGeometryTrianglesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryTrianglesNV.html struct GeometryTrianglesNV { using NativeType = VkGeometryTrianglesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( Buffer vertexData_ = {}, DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, DeviceSize vertexStride_ = {}, Format vertexFormat_ = Format::eUndefined, Buffer indexData_ = {}, DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, IndexType indexType_ = IndexType::eUint16, Buffer transformData_ = {}, DeviceSize transformOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexData{ vertexData_ } , vertexOffset{ vertexOffset_ } , vertexCount{ vertexCount_ } , vertexStride{ vertexStride_ } , vertexFormat{ vertexFormat_ } , indexData{ indexData_ } , indexOffset{ indexOffset_ } , indexCount{ indexCount_ } , indexType{ indexType_ } , transformData{ transformData_ } , transformOffset{ transformOffset_ } { } VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryTrianglesNV( *reinterpret_cast( &rhs ) ) { } GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexData( Buffer vertexData_ ) & VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setVertexData( Buffer vertexData_ ) && VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexOffset( DeviceSize vertexOffset_ ) & VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setVertexOffset( DeviceSize vertexOffset_ ) && VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) & VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setVertexCount( uint32_t vertexCount_ ) && VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexStride( DeviceSize vertexStride_ ) & VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setVertexStride( DeviceSize 
vertexStride_ ) && VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setVertexFormat( Format vertexFormat_ ) & VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setVertexFormat( Format vertexFormat_ ) && VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexData( Buffer indexData_ ) & VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setIndexData( Buffer indexData_ ) && VULKAN_HPP_NOEXCEPT { indexData = indexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexOffset( DeviceSize indexOffset_ ) & VULKAN_HPP_NOEXCEPT { indexOffset = indexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setIndexOffset( DeviceSize indexOffset_ ) && VULKAN_HPP_NOEXCEPT { indexOffset = indexOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) & VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setIndexCount( uint32_t indexCount_ ) && VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformData( Buffer transformData_ ) & VULKAN_HPP_NOEXCEPT { transformData = transformData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setTransformData( Buffer transformData_ ) && VULKAN_HPP_NOEXCEPT { transformData = transformData_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & setTransformOffset( DeviceSize transformOffset_ ) & VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV && setTransformOffset( DeviceSize transformOffset_ ) && VULKAN_HPP_NOEXCEPT { transformOffset = transformOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryTrianglesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryTrianglesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeometryTrianglesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexData, vertexOffset, vertexCount, vertexStride, vertexFormat, indexData, indexOffset, indexCount, indexType, transformData, transformOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeometryTrianglesNV const & ) const = default; #else bool operator==( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexData == rhs.vertexData ) && ( vertexOffset == rhs.vertexOffset ) && ( vertexCount == rhs.vertexCount ) && ( vertexStride == rhs.vertexStride ) && ( vertexFormat == rhs.vertexFormat ) && ( indexData == rhs.indexData ) && ( indexOffset == rhs.indexOffset ) && ( indexCount == rhs.indexCount ) && ( indexType == rhs.indexType ) && ( transformData == rhs.transformData ) && ( transformOffset == rhs.transformOffset ); # endif } bool operator!=( GeometryTrianglesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType 
sType = StructureType::eGeometryTrianglesNV; const void * pNext = {}; Buffer vertexData = {}; DeviceSize vertexOffset = {}; uint32_t vertexCount = {}; DeviceSize vertexStride = {}; Format vertexFormat = Format::eUndefined; Buffer indexData = {}; DeviceSize indexOffset = {}; uint32_t indexCount = {}; IndexType indexType = IndexType::eUint16; Buffer transformData = {}; DeviceSize transformOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeometryTrianglesNV; }; #endif template <> struct CppType { using Type = GeometryTrianglesNV; }; // wrapper struct for struct VkGeometryAABBNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryAABBNV.html struct GeometryAABBNV { using NativeType = VkGeometryAABBNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryAABBNV( Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, DeviceSize offset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , aabbData{ aabbData_ } , numAABBs{ numAABBs_ } , stride{ stride_ } , offset{ offset_ } { } VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryAABBNV( *reinterpret_cast( &rhs ) ) {} GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setAabbData( Buffer aabbData_ ) & VULKAN_HPP_NOEXCEPT { aabbData = aabbData_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV && setAabbData( Buffer aabbData_ ) && VULKAN_HPP_NOEXCEPT { aabbData = aabbData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) & VULKAN_HPP_NOEXCEPT { numAABBs = numAABBs_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV && setNumAABBs( uint32_t numAABBs_ ) && VULKAN_HPP_NOEXCEPT { numAABBs = numAABBs_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setStride( uint32_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV && setStride( uint32_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryAABBNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryAABBNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeometryAABBNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, aabbData, numAABBs, stride, offset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeometryAABBNV const & ) const = default; #else bool operator==( GeometryAABBNV const & rhs 
) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( aabbData == rhs.aabbData ) && ( numAABBs == rhs.numAABBs ) && ( stride == rhs.stride ) && ( offset == rhs.offset ); # endif } bool operator!=( GeometryAABBNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeometryAabbNV; const void * pNext = {}; Buffer aabbData = {}; uint32_t numAABBs = {}; uint32_t stride = {}; DeviceSize offset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeometryAABBNV; }; #endif template <> struct CppType { using Type = GeometryAABBNV; }; // wrapper struct for struct VkGeometryDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryDataNV.html struct GeometryDataNV { using NativeType = VkGeometryDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryTrianglesNV triangles_ = {}, GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT : triangles{ triangles_ } , aabbs{ aabbs_ } { } VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryDataNV( *reinterpret_cast( &rhs ) ) {} GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setTriangles( GeometryTrianglesNV const & triangles_ ) & VULKAN_HPP_NOEXCEPT { triangles = triangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryDataNV && setTriangles( 
GeometryTrianglesNV const & triangles_ ) && VULKAN_HPP_NOEXCEPT { triangles = triangles_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & setAabbs( GeometryAABBNV const & aabbs_ ) & VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryDataNV && setAabbs( GeometryAABBNV const & aabbs_ ) && VULKAN_HPP_NOEXCEPT { aabbs = aabbs_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeometryDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( triangles, aabbs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeometryDataNV const & ) const = default; #else bool operator==( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( triangles == rhs.triangles ) && ( aabbs == rhs.aabbs ); # endif } bool operator!=( GeometryDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: GeometryTrianglesNV triangles = {}; GeometryAABBNV aabbs = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeometryDataNV; }; #endif // wrapper struct for struct VkGeometryNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeometryNV.html struct GeometryNV { using NativeType = VkGeometryNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR GeometryNV( GeometryTypeKHR geometryType_ = GeometryTypeKHR::eTriangles, GeometryDataNV geometry_ = {}, GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , geometryType{ geometryType_ } , geometry{ geometry_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeometryNV( *reinterpret_cast( &rhs ) ) {} GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeometryNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometryType( GeometryTypeKHR geometryType_ ) & VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV && setGeometryType( GeometryTypeKHR geometryType_ ) && VULKAN_HPP_NOEXCEPT { geometryType = geometryType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryNV & setGeometry( GeometryDataNV const & geometry_ ) & VULKAN_HPP_NOEXCEPT { geometry = geometry_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV && setGeometry( GeometryDataNV const & geometry_ ) && VULKAN_HPP_NOEXCEPT { geometry = geometry_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeometryNV & setFlags( GeometryFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeometryNV && setFlags( GeometryFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } 
#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeometryNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeometryNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeometryNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, geometryType, geometry, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeometryNV const & ) const = default; #else bool operator==( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( geometryType == rhs.geometryType ) && ( geometry == rhs.geometry ) && ( flags == rhs.flags ); # endif } bool operator!=( GeometryNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeometryNV; const void * pNext = {}; GeometryTypeKHR geometryType = GeometryTypeKHR::eTriangles; GeometryDataNV geometry = {}; GeometryFlagsKHR flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeometryNV; }; #endif template <> struct CppType { using Type = GeometryNV; }; // wrapper struct for struct VkAccelerationStructureInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInfoNV.html struct AccelerationStructureInfoNV { using NativeType = VkAccelerationStructureInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( 
AccelerationStructureTypeNV type_ = {}, BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const GeometryNV * pGeometries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , flags{ flags_ } , instanceCount{ instanceCount_ } , geometryCount{ geometryCount_ } , pGeometries{ pGeometries_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureInfoNV( AccelerationStructureTypeNV type_, BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, ArrayProxyNoTemporaries const & geometries_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , type( type_ ) , flags( flags_ ) , instanceCount( instanceCount_ ) , geometryCount( static_cast( geometries_.size() ) ) , pGeometries( geometries_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setType( AccelerationStructureTypeNV type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setType( AccelerationStructureTypeNV type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setFlags( BuildAccelerationStructureFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setFlags( BuildAccelerationStructureFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) & VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setInstanceCount( uint32_t instanceCount_ ) && VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) & VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setGeometryCount( uint32_t geometryCount_ ) && VULKAN_HPP_NOEXCEPT { geometryCount = geometryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & setPGeometries( const GeometryNV * pGeometries_ ) & VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV && setPGeometries( const GeometryNV * pGeometries_ ) && VULKAN_HPP_NOEXCEPT { pGeometries = pGeometries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureInfoNV & setGeometries( ArrayProxyNoTemporaries const & geometries_ ) VULKAN_HPP_NOEXCEPT { geometryCount = static_cast( geometries_.size() ); pGeometries = geometries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast( this ); } operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, flags, instanceCount, geometryCount, pGeometries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureInfoNV const & ) const = default; #else bool operator==( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && ( geometryCount == rhs.geometryCount ) && ( pGeometries == rhs.pGeometries ); # endif } bool operator!=( AccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureInfoNV; const void * pNext = {}; AccelerationStructureTypeNV type = {}; BuildAccelerationStructureFlagsNV flags = {}; uint32_t instanceCount = {}; uint32_t geometryCount = {}; const GeometryNV * pGeometries = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureInfoNV; }; #endif template <> struct CppType { using Type = AccelerationStructureInfoNV; }; // wrapper struct for struct VkAccelerationStructureCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureCreateInfoNV.html struct AccelerationStructureCreateInfoNV { using NativeType = VkAccelerationStructureCreateInfoNV; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( DeviceSize compactedSize_ = {}, AccelerationStructureInfoNV info_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , compactedSize{ compactedSize_ } , info{ info_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureCreateInfoNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setCompactedSize( DeviceSize compactedSize_ ) & VULKAN_HPP_NOEXCEPT { compactedSize = compactedSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV && setCompactedSize( DeviceSize compactedSize_ ) && VULKAN_HPP_NOEXCEPT { compactedSize = compactedSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & setInfo( AccelerationStructureInfoNV const & info_ ) & VULKAN_HPP_NOEXCEPT { 
info = info_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV && setInfo( AccelerationStructureInfoNV const & info_ ) && VULKAN_HPP_NOEXCEPT { info = info_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, compactedSize, info ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureCreateInfoNV const & ) const = default; #else bool operator==( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compactedSize == rhs.compactedSize ) && ( info == rhs.info ); # endif } bool operator!=( AccelerationStructureCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureCreateInfoNV; const void * pNext = {}; DeviceSize compactedSize = {}; AccelerationStructureInfoNV info = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureCreateInfoNV; }; #endif template <> struct CppType { using Type = AccelerationStructureCreateInfoNV; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX.html struct AccelerationStructureDenseGeometryFormatTrianglesDataAMDX { using NativeType = VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDenseGeometryFormatTrianglesDataAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( DeviceOrHostAddressConstKHR compressedData_ = {}, DeviceSize dataSize_ = {}, uint32_t numTriangles_ = {}, uint32_t numVertices_ = {}, uint32_t maxPrimitiveIndex_ = {}, uint32_t maxGeometryIndex_ = {}, CompressedTriangleFormatAMDX format_ = CompressedTriangleFormatAMDX::eDgf1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , compressedData{ compressedData_ } , dataSize{ dataSize_ } , numTriangles{ numTriangles_ } , numVertices{ numVertices_ } , maxPrimitiveIndex{ maxPrimitiveIndex_ } , maxGeometryIndex{ maxGeometryIndex_ } , format{ format_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureDenseGeometryFormatTrianglesDataAMDX( *reinterpret_cast( &rhs ) ) { } AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & operator=( AccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & operator=( VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setCompressedData( DeviceOrHostAddressConstKHR const & compressedData_ ) & VULKAN_HPP_NOEXCEPT { compressedData = compressedData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setCompressedData( DeviceOrHostAddressConstKHR const & compressedData_ ) && VULKAN_HPP_NOEXCEPT { compressedData = compressedData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setDataSize( DeviceSize dataSize_ ) & VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setDataSize( DeviceSize dataSize_ ) && VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setNumTriangles( uint32_t numTriangles_ ) & VULKAN_HPP_NOEXCEPT { numTriangles = numTriangles_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setNumTriangles( uint32_t numTriangles_ ) && VULKAN_HPP_NOEXCEPT { numTriangles = numTriangles_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setNumVertices( uint32_t numVertices_ ) & VULKAN_HPP_NOEXCEPT { numVertices = numVertices_; return *this; } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setNumVertices( uint32_t numVertices_ ) && VULKAN_HPP_NOEXCEPT { numVertices = numVertices_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setMaxPrimitiveIndex( uint32_t maxPrimitiveIndex_ ) & VULKAN_HPP_NOEXCEPT { maxPrimitiveIndex = maxPrimitiveIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setMaxPrimitiveIndex( uint32_t maxPrimitiveIndex_ ) && VULKAN_HPP_NOEXCEPT { maxPrimitiveIndex = maxPrimitiveIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setMaxGeometryIndex( uint32_t maxGeometryIndex_ ) & VULKAN_HPP_NOEXCEPT { maxGeometryIndex = maxGeometryIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setMaxGeometryIndex( uint32_t maxGeometryIndex_ ) && VULKAN_HPP_NOEXCEPT { maxGeometryIndex = maxGeometryIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX & setFormat( CompressedTriangleFormatAMDX format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDenseGeometryFormatTrianglesDataAMDX && setFormat( CompressedTriangleFormatAMDX format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureDenseGeometryFormatTrianglesDataAMDX *() 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, compressedData, dataSize, numTriangles, numVertices, maxPrimitiveIndex, maxGeometryIndex, format ); } # endif public: StructureType sType = StructureType::eAccelerationStructureDenseGeometryFormatTrianglesDataAMDX; const void * pNext = {}; DeviceOrHostAddressConstKHR compressedData = {}; DeviceSize dataSize = {}; uint32_t numTriangles = {}; uint32_t numVertices = {}; uint32_t maxPrimitiveIndex = {}; uint32_t maxGeometryIndex = {}; CompressedTriangleFormatAMDX format = CompressedTriangleFormatAMDX::eDgf1; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureDenseGeometryFormatTrianglesDataAMDX; }; # endif template <> struct CppType { using Type = AccelerationStructureDenseGeometryFormatTrianglesDataAMDX; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkAccelerationStructureDeviceAddressInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureDeviceAddressInfoKHR.html struct AccelerationStructureDeviceAddressInfoKHR { using NativeType = VkAccelerationStructureDeviceAddressInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureKHR accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , accelerationStructure{ accelerationStructure_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureDeviceAddressInfoKHR( 
VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR && setAccelerationStructure( AccelerationStructureKHR accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureDeviceAddressInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureDeviceAddressInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureDeviceAddressInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const 
VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, accelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const & ) const = default; #else bool operator==( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ); # endif } bool operator!=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR; const void * pNext = {}; AccelerationStructureKHR accelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureDeviceAddressInfoKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureDeviceAddressInfoKHR; }; // wrapper struct for struct VkAccelerationStructureGeometryLinearSweptSpheresDataNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryLinearSweptSpheresDataNV.html struct AccelerationStructureGeometryLinearSweptSpheresDataNV { using NativeType = VkAccelerationStructureGeometryLinearSweptSpheresDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV( Format vertexFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR vertexData_ = {}, DeviceSize vertexStride_ = {}, Format radiusFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR radiusData_ = {}, DeviceSize 
radiusStride_ = {}, IndexType indexType_ = IndexType::eUint16, DeviceOrHostAddressConstKHR indexData_ = {}, DeviceSize indexStride_ = {}, RayTracingLssIndexingModeNV indexingMode_ = RayTracingLssIndexingModeNV::eList, RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ = RayTracingLssPrimitiveEndCapsModeNV::eNone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexFormat{ vertexFormat_ } , vertexData{ vertexData_ } , vertexStride{ vertexStride_ } , radiusFormat{ radiusFormat_ } , radiusData{ radiusData_ } , radiusStride{ radiusStride_ } , indexType{ indexType_ } , indexData{ indexData_ } , indexStride{ indexStride_ } , indexingMode{ indexingMode_ } , endCapsMode{ endCapsMode_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryLinearSweptSpheresDataNV( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryLinearSweptSpheresDataNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( AccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryLinearSweptSpheresDataNV & operator=( VkAccelerationStructureGeometryLinearSweptSpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexFormat( Format vertexFormat_ ) & VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setVertexFormat( Format vertexFormat_ ) && VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) & VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) && VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setVertexStride( DeviceSize vertexStride_ ) & VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setVertexStride( DeviceSize vertexStride_ ) && VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusFormat( Format radiusFormat_ ) & VULKAN_HPP_NOEXCEPT { radiusFormat = radiusFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setRadiusFormat( Format radiusFormat_ ) && VULKAN_HPP_NOEXCEPT { radiusFormat = radiusFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) & VULKAN_HPP_NOEXCEPT { radiusData = radiusData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) && VULKAN_HPP_NOEXCEPT { radiusData 
= radiusData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setRadiusStride( DeviceSize radiusStride_ ) & VULKAN_HPP_NOEXCEPT { radiusStride = radiusStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setRadiusStride( DeviceSize radiusStride_ ) && VULKAN_HPP_NOEXCEPT { radiusStride = radiusStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) & VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) && VULKAN_HPP_NOEXCEPT { indexData = indexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexStride( DeviceSize indexStride_ ) & VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setIndexStride( DeviceSize indexStride_ ) && VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setIndexingMode( RayTracingLssIndexingModeNV indexingMode_ ) & VULKAN_HPP_NOEXCEPT { indexingMode = indexingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setIndexingMode( RayTracingLssIndexingModeNV indexingMode_ 
) && VULKAN_HPP_NOEXCEPT { indexingMode = indexingMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV & setEndCapsMode( RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) & VULKAN_HPP_NOEXCEPT { endCapsMode = endCapsMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryLinearSweptSpheresDataNV && setEndCapsMode( RayTracingLssPrimitiveEndCapsModeNV endCapsMode_ ) && VULKAN_HPP_NOEXCEPT { endCapsMode = endCapsMode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryLinearSweptSpheresDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride, indexingMode, endCapsMode ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryLinearSweptSpheresDataNV; const void * pNext = {}; Format vertexFormat = Format::eUndefined; DeviceOrHostAddressConstKHR vertexData = {}; DeviceSize vertexStride = {}; Format radiusFormat = Format::eUndefined; DeviceOrHostAddressConstKHR radiusData = {}; DeviceSize radiusStride = {}; IndexType indexType = IndexType::eUint16; DeviceOrHostAddressConstKHR indexData = {}; DeviceSize indexStride = {}; RayTracingLssIndexingModeNV indexingMode = RayTracingLssIndexingModeNV::eList; RayTracingLssPrimitiveEndCapsModeNV endCapsMode = 
RayTracingLssPrimitiveEndCapsModeNV::eNone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryLinearSweptSpheresDataNV; }; // wrapper struct for struct VkAccelerationStructureGeometryMotionTrianglesDataNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometryMotionTrianglesDataNV.html struct AccelerationStructureGeometryMotionTrianglesDataNV { using NativeType = VkAccelerationStructureGeometryMotionTrianglesDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( DeviceOrHostAddressConstKHR vertexData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexData{ vertexData_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometryMotionTrianglesDataNV( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometryMotionTrianglesDataNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometryMotionTrianglesDataNV & operator=( AccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometryMotionTrianglesDataNV & operator=( VkAccelerationStructureGeometryMotionTrianglesDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) & VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV && setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) && VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometryMotionTrianglesDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryMotionTrianglesDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometryMotionTrianglesDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometryMotionTrianglesDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexData ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometryMotionTrianglesDataNV; const void * pNext = {}; DeviceOrHostAddressConstKHR vertexData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometryMotionTrianglesDataNV; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometryMotionTrianglesDataNV; }; // wrapper struct for struct VkAccelerationStructureGeometrySpheresDataNV, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureGeometrySpheresDataNV.html struct AccelerationStructureGeometrySpheresDataNV { using NativeType = VkAccelerationStructureGeometrySpheresDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometrySpheresDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( Format vertexFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR vertexData_ = {}, DeviceSize vertexStride_ = {}, Format radiusFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR radiusData_ = {}, DeviceSize radiusStride_ = {}, IndexType indexType_ = IndexType::eUint16, DeviceOrHostAddressConstKHR indexData_ = {}, DeviceSize indexStride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexFormat{ vertexFormat_ } , vertexData{ vertexData_ } , vertexStride{ vertexStride_ } , radiusFormat{ radiusFormat_ } , radiusData{ radiusData_ } , radiusStride{ radiusStride_ } , indexType{ indexType_ } , indexData{ indexData_ } , indexStride{ indexStride_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureGeometrySpheresDataNV( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureGeometrySpheresDataNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureGeometrySpheresDataNV & operator=( AccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureGeometrySpheresDataNV & operator=( VkAccelerationStructureGeometrySpheresDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexFormat( Format vertexFormat_ ) & VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setVertexFormat( Format vertexFormat_ ) && VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) & VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setVertexData( DeviceOrHostAddressConstKHR const & vertexData_ ) && VULKAN_HPP_NOEXCEPT { vertexData = vertexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setVertexStride( DeviceSize vertexStride_ ) & VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setVertexStride( DeviceSize vertexStride_ ) && VULKAN_HPP_NOEXCEPT { vertexStride = vertexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusFormat( Format radiusFormat_ ) & VULKAN_HPP_NOEXCEPT { radiusFormat = radiusFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setRadiusFormat( Format radiusFormat_ ) && VULKAN_HPP_NOEXCEPT { radiusFormat = radiusFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureGeometrySpheresDataNV & setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) & VULKAN_HPP_NOEXCEPT { radiusData = radiusData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setRadiusData( DeviceOrHostAddressConstKHR const & radiusData_ ) && VULKAN_HPP_NOEXCEPT { radiusData = radiusData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setRadiusStride( DeviceSize radiusStride_ ) & VULKAN_HPP_NOEXCEPT { radiusStride = radiusStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setRadiusStride( DeviceSize radiusStride_ ) && VULKAN_HPP_NOEXCEPT { radiusStride = radiusStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) & VULKAN_HPP_NOEXCEPT { indexData = indexData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setIndexData( DeviceOrHostAddressConstKHR const & indexData_ ) && VULKAN_HPP_NOEXCEPT { indexData = indexData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV & setIndexStride( DeviceSize indexStride_ ) & VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometrySpheresDataNV && setIndexStride( DeviceSize indexStride_ ) && VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureGeometrySpheresDataNV const 
&() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometrySpheresDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureGeometrySpheresDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureGeometrySpheresDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexFormat, vertexData, vertexStride, radiusFormat, radiusData, radiusStride, indexType, indexData, indexStride ); } #endif public: StructureType sType = StructureType::eAccelerationStructureGeometrySpheresDataNV; const void * pNext = {}; Format vertexFormat = Format::eUndefined; DeviceOrHostAddressConstKHR vertexData = {}; DeviceSize vertexStride = {}; Format radiusFormat = Format::eUndefined; DeviceOrHostAddressConstKHR radiusData = {}; DeviceSize radiusStride = {}; IndexType indexType = IndexType::eUint16; DeviceOrHostAddressConstKHR indexData = {}; DeviceSize indexStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureGeometrySpheresDataNV; }; #endif template <> struct CppType { using Type = AccelerationStructureGeometrySpheresDataNV; }; // wrapper struct for struct VkTransformMatrixKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTransformMatrixKHR.html struct TransformMatrixKHR { using NativeType = VkTransformMatrixKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix{ matrix_ } {} VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT : TransformMatrixKHR( 
*reinterpret_cast( &rhs ) ) {} TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & setMatrix( std::array, 3> matrix_ ) & VULKAN_HPP_NOEXCEPT { matrix = matrix_; return *this; } VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR && setMatrix( std::array, 3> matrix_ ) && VULKAN_HPP_NOEXCEPT { matrix = matrix_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkTransformMatrixKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTransformMatrixKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkTransformMatrixKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( matrix ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( TransformMatrixKHR const & ) const = default; #else bool operator==( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( matrix == rhs.matrix ); # endif } bool operator!=( TransformMatrixKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ArrayWrapper2D matrix = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = TransformMatrixKHR; }; #endif using TransformMatrixNV = TransformMatrixKHR; // wrapper struct for struct VkAccelerationStructureInstanceKHR, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureInstanceKHR.html struct AccelerationStructureInstanceKHR { using NativeType = VkAccelerationStructureInstanceKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT : transform{ transform_ } , instanceCustomIndex{ instanceCustomIndex_ } , mask{ mask_ } , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } , flags{ flags_ } , accelerationStructureReference{ accelerationStructureReference_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureInstanceKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setTransform( TransformMatrixKHR const & transform_ ) & VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR && setTransform( TransformMatrixKHR const & transform_ ) && VULKAN_HPP_NOEXCEPT { transform = transform_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR && setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) & VULKAN_HPP_NOEXCEPT { mask = mask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR && setMask( uint32_t mask_ ) && VULKAN_HPP_NOEXCEPT { mask = mask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) & VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR && setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) && VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return std::move( *this ); } AccelerationStructureInstanceKHR & setFlags( GeometryInstanceFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return *this; } AccelerationStructureInstanceKHR && setFlags( GeometryInstanceFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR && setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) && VULKAN_HPP_NOEXCEPT { 
accelerationStructureReference = accelerationStructureReference_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureInstanceKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureInstanceKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureInstanceKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( transform, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureInstanceKHR const & ) const = default; #else bool operator==( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( transform == rhs.transform ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); # endif } bool operator!=( AccelerationStructureInstanceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: TransformMatrixKHR transform = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureInstanceKHR; }; #endif using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR; 
// wrapper struct for struct VkAccelerationStructureMatrixMotionInstanceNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMatrixMotionInstanceNV.html struct AccelerationStructureMatrixMotionInstanceNV { using NativeType = VkAccelerationStructureMatrixMotionInstanceNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( TransformMatrixKHR transformT0_ = {}, TransformMatrixKHR transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT : transformT0{ transformT0_ } , transformT1{ transformT1_ } , instanceCustomIndex{ instanceCustomIndex_ } , mask{ mask_ } , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } , flags{ flags_ } , accelerationStructureReference{ accelerationStructureReference_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMatrixMotionInstanceNV( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureMatrixMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureMatrixMotionInstanceNV & operator=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMatrixMotionInstanceNV & operator=( VkAccelerationStructureMatrixMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT0( TransformMatrixKHR const & 
transformT0_ ) & VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setTransformT0( TransformMatrixKHR const & transformT0_ ) && VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setTransformT1( TransformMatrixKHR const & transformT1_ ) & VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setTransformT1( TransformMatrixKHR const & transformT1_ ) && VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setMask( uint32_t mask_ ) & VULKAN_HPP_NOEXCEPT { mask = mask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setMask( uint32_t mask_ ) && VULKAN_HPP_NOEXCEPT { mask = mask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) & VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) && VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = 
instanceShaderBindingTableRecordOffset_; return std::move( *this ); } AccelerationStructureMatrixMotionInstanceNV & setFlags( GeometryInstanceFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return *this; } AccelerationStructureMatrixMotionInstanceNV && setFlags( GeometryInstanceFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMatrixMotionInstanceNV && setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMatrixMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMatrixMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMatrixMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureMatrixMotionInstanceNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, accelerationStructureReference ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureMatrixMotionInstanceNV const & ) const = default; #else bool operator==( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) 
return this->reflect() == rhs.reflect(); # else return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); # endif } bool operator!=( AccelerationStructureMatrixMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: TransformMatrixKHR transformT0 = {}; TransformMatrixKHR transformT1 = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureMatrixMotionInstanceNV; }; #endif // wrapper struct for struct VkAccelerationStructureMemoryRequirementsInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html struct AccelerationStructureMemoryRequirementsInfoNV { using NativeType = VkAccelerationStructureMemoryRequirementsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsTypeNV type_ = AccelerationStructureMemoryRequirementsTypeNV::eObject, AccelerationStructureNV accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , accelerationStructure{ accelerationStructure_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( 
AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setType( AccelerationStructureMemoryRequirementsTypeNV type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV && setType( AccelerationStructureMemoryRequirementsTypeNV type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV && setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } #endif 
/*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, accelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const & ) const = default; #else bool operator==( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( accelerationStructure == rhs.accelerationStructure ); # endif } bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV; const void * pNext = {}; AccelerationStructureMemoryRequirementsTypeNV type = AccelerationStructureMemoryRequirementsTypeNV::eObject; AccelerationStructureNV accelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureMemoryRequirementsInfoNV; }; #endif template <> struct CppType { using Type = AccelerationStructureMemoryRequirementsInfoNV; }; // wrapper struct for struct VkAccelerationStructureMotionInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInfoNV.html struct 
AccelerationStructureMotionInfoNV { using NativeType = VkAccelerationStructureMotionInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMotionInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {}, AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxInstances{ maxInstances_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInfoNV( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureMotionInfoNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureMotionInfoNV & operator=( AccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMotionInfoNV & operator=( VkAccelerationStructureMotionInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setMaxInstances( uint32_t maxInstances_ ) & VULKAN_HPP_NOEXCEPT { maxInstances = maxInstances_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV && setMaxInstances( uint32_t maxInstances_ ) && VULKAN_HPP_NOEXCEPT { maxInstances = maxInstances_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV & setFlags( AccelerationStructureMotionInfoFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInfoNV && setFlags( AccelerationStructureMotionInfoFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMotionInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMotionInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMotionInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureMotionInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxInstances, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureMotionInfoNV const & ) const = default; #else bool operator==( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInstances == rhs.maxInstances ) && ( flags == rhs.flags ); # endif } bool operator!=( AccelerationStructureMotionInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureMotionInfoNV; const void * pNext = {}; uint32_t maxInstances = {}; AccelerationStructureMotionInfoFlagsNV flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureMotionInfoNV; }; #endif template <> struct CppType { using Type = AccelerationStructureMotionInfoNV; }; // wrapper struct for struct 
VkSRTDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSRTDataNV.html struct SRTDataNV { using NativeType = VkSRTDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SRTDataNV( float sx_ = {}, float a_ = {}, float b_ = {}, float pvx_ = {}, float sy_ = {}, float c_ = {}, float pvy_ = {}, float sz_ = {}, float pvz_ = {}, float qx_ = {}, float qy_ = {}, float qz_ = {}, float qw_ = {}, float tx_ = {}, float ty_ = {}, float tz_ = {} ) VULKAN_HPP_NOEXCEPT : sx{ sx_ } , a{ a_ } , b{ b_ } , pvx{ pvx_ } , sy{ sy_ } , c{ c_ } , pvy{ pvy_ } , sz{ sz_ } , pvz{ pvz_ } , qx{ qx_ } , qy{ qy_ } , qz{ qz_ } , qw{ qw_ } , tx{ tx_ } , ty{ ty_ } , tz{ tz_ } { } VULKAN_HPP_CONSTEXPR SRTDataNV( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; SRTDataNV( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : SRTDataNV( *reinterpret_cast( &rhs ) ) {} SRTDataNV & operator=( SRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SRTDataNV & operator=( VkSRTDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSx( float sx_ ) & VULKAN_HPP_NOEXCEPT { sx = sx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setSx( float sx_ ) && VULKAN_HPP_NOEXCEPT { sx = sx_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setA( float a_ ) & VULKAN_HPP_NOEXCEPT { a = a_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setA( float a_ ) && VULKAN_HPP_NOEXCEPT { a = a_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setB( float b_ ) & VULKAN_HPP_NOEXCEPT { b = b_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setB( float b_ ) && VULKAN_HPP_NOEXCEPT { b = b_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvx( float pvx_ ) & VULKAN_HPP_NOEXCEPT { pvx = pvx_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setPvx( float pvx_ ) && VULKAN_HPP_NOEXCEPT { pvx = pvx_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSy( float sy_ ) & VULKAN_HPP_NOEXCEPT { sy = sy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setSy( float sy_ ) && VULKAN_HPP_NOEXCEPT { sy = sy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setC( float c_ ) & VULKAN_HPP_NOEXCEPT { c = c_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setC( float c_ ) && VULKAN_HPP_NOEXCEPT { c = c_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvy( float pvy_ ) & VULKAN_HPP_NOEXCEPT { pvy = pvy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setPvy( float pvy_ ) && VULKAN_HPP_NOEXCEPT { pvy = pvy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setSz( float sz_ ) & VULKAN_HPP_NOEXCEPT { sz = sz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setSz( float sz_ ) && VULKAN_HPP_NOEXCEPT { sz = sz_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setPvz( float pvz_ ) & VULKAN_HPP_NOEXCEPT { pvz = pvz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setPvz( float pvz_ ) && VULKAN_HPP_NOEXCEPT { pvz = pvz_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQx( float qx_ ) & VULKAN_HPP_NOEXCEPT { qx = qx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setQx( float qx_ ) && VULKAN_HPP_NOEXCEPT { qx = qx_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQy( float qy_ ) & VULKAN_HPP_NOEXCEPT { qy = qy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setQy( float qy_ ) && VULKAN_HPP_NOEXCEPT { qy = qy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQz( float qz_ ) & VULKAN_HPP_NOEXCEPT { qz = qz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setQz( float qz_ ) && VULKAN_HPP_NOEXCEPT { qz = qz_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setQw( float 
qw_ ) & VULKAN_HPP_NOEXCEPT { qw = qw_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setQw( float qw_ ) && VULKAN_HPP_NOEXCEPT { qw = qw_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTx( float tx_ ) & VULKAN_HPP_NOEXCEPT { tx = tx_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setTx( float tx_ ) && VULKAN_HPP_NOEXCEPT { tx = tx_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTy( float ty_ ) & VULKAN_HPP_NOEXCEPT { ty = ty_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setTy( float ty_ ) && VULKAN_HPP_NOEXCEPT { ty = ty_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SRTDataNV & setTz( float tz_ ) & VULKAN_HPP_NOEXCEPT { tz = tz_; return *this; } VULKAN_HPP_CONSTEXPR_14 SRTDataNV && setTz( float tz_ ) && VULKAN_HPP_NOEXCEPT { tz = tz_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSRTDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSRTDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSRTDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSRTDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sx, a, b, pvx, sy, c, pvy, sz, pvz, qx, qy, qz, qw, tx, ty, tz ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SRTDataNV const & ) const = default; #else bool operator==( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sx == rhs.sx ) && ( a == rhs.a ) && ( b == rhs.b ) && ( pvx == rhs.pvx ) && ( sy == rhs.sy ) && ( c == rhs.c ) && ( pvy == rhs.pvy ) && ( sz == rhs.sz ) && ( pvz == rhs.pvz ) && ( qx == rhs.qx ) && ( qy == rhs.qy ) && ( qz == rhs.qz ) && ( qw == rhs.qw ) && ( tx == rhs.tx ) && ( ty == rhs.ty ) && ( tz == 
rhs.tz ); # endif } bool operator!=( SRTDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float sx = {}; float a = {}; float b = {}; float pvx = {}; float sy = {}; float c = {}; float pvy = {}; float sz = {}; float pvz = {}; float qx = {}; float qy = {}; float qz = {}; float qw = {}; float tx = {}; float ty = {}; float tz = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SRTDataNV; }; #endif // wrapper struct for struct VkAccelerationStructureSRTMotionInstanceNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureSRTMotionInstanceNV.html struct AccelerationStructureSRTMotionInstanceNV { using NativeType = VkAccelerationStructureSRTMotionInstanceNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( SRTDataNV transformT0_ = {}, SRTDataNV transformT1_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT : transformT0{ transformT0_ } , transformT1{ transformT1_ } , instanceCustomIndex{ instanceCustomIndex_ } , mask{ mask_ } , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } , flags{ flags_ } , accelerationStructureReference{ accelerationStructureReference_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureSRTMotionInstanceNV( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureSRTMotionInstanceNV( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureSRTMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureSRTMotionInstanceNV & operator=( AccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureSRTMotionInstanceNV & operator=( VkAccelerationStructureSRTMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT0( SRTDataNV const & transformT0_ ) & VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setTransformT0( SRTDataNV const & transformT0_ ) && VULKAN_HPP_NOEXCEPT { transformT0 = transformT0_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setTransformT1( SRTDataNV const & transformT1_ ) & VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setTransformT1( SRTDataNV const & transformT1_ ) && VULKAN_HPP_NOEXCEPT { transformT1 = transformT1_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceCustomIndex = instanceCustomIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setMask( uint32_t mask_ ) & VULKAN_HPP_NOEXCEPT { mask = mask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setMask( uint32_t mask_ ) && VULKAN_HPP_NOEXCEPT { mask = mask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) & VULKAN_HPP_NOEXCEPT { 
instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) && VULKAN_HPP_NOEXCEPT { instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_; return std::move( *this ); } AccelerationStructureSRTMotionInstanceNV & setFlags( GeometryInstanceFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return *this; } AccelerationStructureSRTMotionInstanceNV && setFlags( GeometryInstanceFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = *reinterpret_cast( &flags_ ); return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureSRTMotionInstanceNV && setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureReference = accelerationStructureReference_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureSRTMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureSRTMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureSRTMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureSRTMotionInstanceNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( transformT0, transformT1, instanceCustomIndex, mask, instanceShaderBindingTableRecordOffset, flags, 
accelerationStructureReference ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureSRTMotionInstanceNV const & ) const = default; #else bool operator==( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( transformT0 == rhs.transformT0 ) && ( transformT1 == rhs.transformT1 ) && ( instanceCustomIndex == rhs.instanceCustomIndex ) && ( mask == rhs.mask ) && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset ) && ( flags == rhs.flags ) && ( accelerationStructureReference == rhs.accelerationStructureReference ); # endif } bool operator!=( AccelerationStructureSRTMotionInstanceNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: SRTDataNV transformT0 = {}; SRTDataNV transformT1 = {}; uint32_t instanceCustomIndex : 24; uint32_t mask : 8; uint32_t instanceShaderBindingTableRecordOffset : 24; VkGeometryInstanceFlagsKHR flags : 8; uint64_t accelerationStructureReference = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureSRTMotionInstanceNV; }; #endif union AccelerationStructureMotionInstanceDataNV { using NativeType = VkAccelerationStructureMotionInstanceDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureInstanceKHR staticInstance_ = {} ) : staticInstance( staticInstance_ ) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance_ ) : matrixMotionInstance( matrixMotionInstance_ ) { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV( AccelerationStructureSRTMotionInstanceNV srtMotionInstance_ ) : srtMotionInstance( srtMotionInstance_ ) { } #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setStaticInstance( AccelerationStructureInstanceKHR const & staticInstance_ ) & VULKAN_HPP_NOEXCEPT { staticInstance = staticInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV && setStaticInstance( AccelerationStructureInstanceKHR const & staticInstance_ ) && VULKAN_HPP_NOEXCEPT { staticInstance = staticInstance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setMatrixMotionInstance( AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) & VULKAN_HPP_NOEXCEPT { matrixMotionInstance = matrixMotionInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV && setMatrixMotionInstance( AccelerationStructureMatrixMotionInstanceNV const & matrixMotionInstance_ ) && VULKAN_HPP_NOEXCEPT { matrixMotionInstance = matrixMotionInstance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV & setSrtMotionInstance( AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) & VULKAN_HPP_NOEXCEPT { srtMotionInstance = srtMotionInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceDataNV && setSrtMotionInstance( AccelerationStructureSRTMotionInstanceNV const & srtMotionInstance_ ) && VULKAN_HPP_NOEXCEPT { srtMotionInstance = srtMotionInstance_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMotionInstanceDataNV const &() const { return *reinterpret_cast( this ); } operator VkAccelerationStructureMotionInstanceDataNV &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS AccelerationStructureInstanceKHR staticInstance; AccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; 
AccelerationStructureSRTMotionInstanceNV srtMotionInstance; #else VkAccelerationStructureInstanceKHR staticInstance; VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureMotionInstanceDataNV; }; #endif // wrapper struct for struct VkAccelerationStructureMotionInstanceNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureMotionInstanceNV.html struct AccelerationStructureMotionInstanceNV { using NativeType = VkAccelerationStructureMotionInstanceNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceTypeNV type_ = AccelerationStructureMotionInstanceTypeNV::eStatic, AccelerationStructureMotionInstanceFlagsNV flags_ = {}, AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT : type{ type_ } , flags{ flags_ } , data{ data_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureMotionInstanceNV( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureMotionInstanceNV( *reinterpret_cast( &rhs ) ) { } AccelerationStructureMotionInstanceNV & operator=( AccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureMotionInstanceNV & operator=( VkAccelerationStructureMotionInstanceNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setType( 
AccelerationStructureMotionInstanceTypeNV type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV && setType( AccelerationStructureMotionInstanceTypeNV type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setFlags( AccelerationStructureMotionInstanceFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV && setFlags( AccelerationStructureMotionInstanceFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV & setData( AccelerationStructureMotionInstanceDataNV const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMotionInstanceNV && setData( AccelerationStructureMotionInstanceDataNV const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureMotionInstanceNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMotionInstanceNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureMotionInstanceNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureMotionInstanceNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( type, flags, data ); } #endif public: AccelerationStructureMotionInstanceTypeNV type = AccelerationStructureMotionInstanceTypeNV::eStatic; AccelerationStructureMotionInstanceFlagsNV flags = {}; AccelerationStructureMotionInstanceDataNV data = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
AccelerationStructureMotionInstanceNV; }; #endif // wrapper struct for struct VkMicromapUsageEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapUsageEXT.html struct MicromapUsageEXT { using NativeType = VkMicromapUsageEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT : count{ count_ } , subdivisionLevel{ subdivisionLevel_ } , format{ format_ } { } VULKAN_HPP_CONSTEXPR MicromapUsageEXT( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapUsageEXT( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapUsageEXT( *reinterpret_cast( &rhs ) ) {} MicromapUsageEXT & operator=( MicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapUsageEXT & operator=( VkMicromapUsageEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setCount( uint32_t count_ ) & VULKAN_HPP_NOEXCEPT { count = count_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT && setCount( uint32_t count_ ) && VULKAN_HPP_NOEXCEPT { count = count_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setSubdivisionLevel( uint32_t subdivisionLevel_ ) & VULKAN_HPP_NOEXCEPT { subdivisionLevel = subdivisionLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT && setSubdivisionLevel( uint32_t subdivisionLevel_ ) && VULKAN_HPP_NOEXCEPT { subdivisionLevel = subdivisionLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT & setFormat( uint32_t format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapUsageEXT && setFormat( uint32_t format_ ) && VULKAN_HPP_NOEXCEPT { format = 
format_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapUsageEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapUsageEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapUsageEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapUsageEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( count, subdivisionLevel, format ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MicromapUsageEXT const & ) const = default; #else bool operator==( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( count == rhs.count ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format ); # endif } bool operator!=( MicromapUsageEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t count = {}; uint32_t subdivisionLevel = {}; uint32_t format = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapUsageEXT; }; #endif #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkAccelerationStructureTrianglesDisplacementMicromapNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesDisplacementMicromapNV.html struct AccelerationStructureTrianglesDisplacementMicromapNV { using NativeType = VkAccelerationStructureTrianglesDisplacementMicromapNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureTrianglesDisplacementMicromapNV( Format displacementBiasAndScaleFormat_ = Format::eUndefined, Format displacementVectorFormat_ = Format::eUndefined, DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_ = {}, DeviceSize displacementBiasAndScaleStride_ = {}, DeviceOrHostAddressConstKHR displacementVectorBuffer_ = {}, DeviceSize displacementVectorStride_ = {}, DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_ = {}, DeviceSize displacedMicromapPrimitiveFlagsStride_ = {}, IndexType indexType_ = IndexType::eUint16, DeviceOrHostAddressConstKHR indexBuffer_ = {}, DeviceSize indexStride_ = {}, uint32_t baseTriangle_ = {}, uint32_t usageCountsCount_ = {}, const MicromapUsageEXT * pUsageCounts_ = {}, const MicromapUsageEXT * const * ppUsageCounts_ = {}, MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ } , displacementVectorFormat{ displacementVectorFormat_ } , displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ } , displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ } , displacementVectorBuffer{ displacementVectorBuffer_ } , displacementVectorStride{ displacementVectorStride_ } , displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ } , displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ } , indexType{ indexType_ } , indexBuffer{ indexBuffer_ } , indexStride{ indexStride_ } , baseTriangle{ baseTriangle_ } , usageCountsCount{ usageCountsCount_ } , pUsageCounts{ pUsageCounts_ } , ppUsageCounts{ ppUsageCounts_ } , micromap{ micromap_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureTrianglesDisplacementMicromapNV( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT : 
AccelerationStructureTrianglesDisplacementMicromapNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureTrianglesDisplacementMicromapNV( Format displacementBiasAndScaleFormat_, Format displacementVectorFormat_, DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer_, DeviceSize displacementBiasAndScaleStride_, DeviceOrHostAddressConstKHR displacementVectorBuffer_, DeviceSize displacementVectorStride_, DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags_, DeviceSize displacedMicromapPrimitiveFlagsStride_, IndexType indexType_, DeviceOrHostAddressConstKHR indexBuffer_, DeviceSize indexStride_, uint32_t baseTriangle_, ArrayProxyNoTemporaries const & usageCounts_, ArrayProxyNoTemporaries const & pUsageCounts_ = {}, MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) : pNext( pNext_ ) , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ ) , displacementVectorFormat( displacementVectorFormat_ ) , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ ) , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ ) , displacementVectorBuffer( displacementVectorBuffer_ ) , displacementVectorStride( displacementVectorStride_ ) , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ ) , displacedMicromapPrimitiveFlagsStride( displacedMicromapPrimitiveFlagsStride_ ) , indexType( indexType_ ) , indexBuffer( indexBuffer_ ) , indexStride( indexStride_ ) , baseTriangle( baseTriangle_ ) , usageCountsCount( static_cast( usageCounts_.size() ) ) , pUsageCounts( usageCounts_.data() ) , ppUsageCounts( pUsageCounts_.data() ) , micromap( micromap_ ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( usageCounts_.empty() || pUsageCounts_.empty() || ( usageCounts_.size() == pUsageCounts_.size() ) ); # else if ( !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING 
"::AccelerationStructureTrianglesDisplacementMicromapNV::AccelerationStructureTrianglesDisplacementMicromapNV: !usageCounts_.empty() && !pUsageCounts_.empty() && ( usageCounts_.size() != pUsageCounts_.size() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ AccelerationStructureTrianglesDisplacementMicromapNV & operator=( AccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureTrianglesDisplacementMicromapNV & operator=( VkAccelerationStructureTrianglesDisplacementMicromapNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleFormat( Format displacementBiasAndScaleFormat_ ) & VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacementBiasAndScaleFormat( Format displacementBiasAndScaleFormat_ ) && VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleFormat = displacementBiasAndScaleFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorFormat( Format displacementVectorFormat_ ) & VULKAN_HPP_NOEXCEPT { displacementVectorFormat = displacementVectorFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && 
setDisplacementVectorFormat( Format displacementVectorFormat_ ) && VULKAN_HPP_NOEXCEPT { displacementVectorFormat = displacementVectorFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleBuffer( DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) & VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacementBiasAndScaleBuffer( DeviceOrHostAddressConstKHR const & displacementBiasAndScaleBuffer_ ) && VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleBuffer = displacementBiasAndScaleBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementBiasAndScaleStride( DeviceSize displacementBiasAndScaleStride_ ) & VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleStride = displacementBiasAndScaleStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacementBiasAndScaleStride( DeviceSize displacementBiasAndScaleStride_ ) && VULKAN_HPP_NOEXCEPT { displacementBiasAndScaleStride = displacementBiasAndScaleStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorBuffer( DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) & VULKAN_HPP_NOEXCEPT { displacementVectorBuffer = displacementVectorBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacementVectorBuffer( DeviceOrHostAddressConstKHR const & displacementVectorBuffer_ ) && VULKAN_HPP_NOEXCEPT { displacementVectorBuffer = displacementVectorBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacementVectorStride( DeviceSize 
displacementVectorStride_ ) & VULKAN_HPP_NOEXCEPT { displacementVectorStride = displacementVectorStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacementVectorStride( DeviceSize displacementVectorStride_ ) && VULKAN_HPP_NOEXCEPT { displacementVectorStride = displacementVectorStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacedMicromapPrimitiveFlags( DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) & VULKAN_HPP_NOEXCEPT { displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacedMicromapPrimitiveFlags( DeviceOrHostAddressConstKHR const & displacedMicromapPrimitiveFlags_ ) && VULKAN_HPP_NOEXCEPT { displacedMicromapPrimitiveFlags = displacedMicromapPrimitiveFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setDisplacedMicromapPrimitiveFlagsStride( DeviceSize displacedMicromapPrimitiveFlagsStride_ ) & VULKAN_HPP_NOEXCEPT { displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setDisplacedMicromapPrimitiveFlagsStride( DeviceSize displacedMicromapPrimitiveFlagsStride_ ) && VULKAN_HPP_NOEXCEPT { displacedMicromapPrimitiveFlagsStride = displacedMicromapPrimitiveFlagsStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AccelerationStructureTrianglesDisplacementMicromapNV & setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) & VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) && VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setIndexStride( DeviceSize indexStride_ ) & VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setIndexStride( DeviceSize indexStride_ ) && VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setBaseTriangle( uint32_t baseTriangle_ ) & VULKAN_HPP_NOEXCEPT { baseTriangle = baseTriangle_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setBaseTriangle( uint32_t baseTriangle_ ) && VULKAN_HPP_NOEXCEPT { baseTriangle = baseTriangle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCountsCount( uint32_t usageCountsCount_ ) & VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setUsageCountsCount( uint32_t usageCountsCount_ ) && VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) && 
VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureTrianglesDisplacementMicromapNV & setUsageCounts( ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( usageCounts_.size() ); pUsageCounts = usageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) && VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureTrianglesDisplacementMicromapNV & setPUsageCounts( ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( pUsageCounts_.size() ); ppUsageCounts = pUsageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV & setMicromap( MicromapEXT micromap_ ) & VULKAN_HPP_NOEXCEPT { micromap = micromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesDisplacementMicromapNV && setMicromap( MicromapEXT micromap_ ) && VULKAN_HPP_NOEXCEPT { micromap = micromap_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureTrianglesDisplacementMicromapNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesDisplacementMicromapNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesDisplacementMicromapNV const *() const VULKAN_HPP_NOEXCEPT { 
return reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesDisplacementMicromapNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displacementBiasAndScaleFormat, displacementVectorFormat, displacementBiasAndScaleBuffer, displacementBiasAndScaleStride, displacementVectorBuffer, displacementVectorStride, displacedMicromapPrimitiveFlags, displacedMicromapPrimitiveFlagsStride, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap ); } # endif public: StructureType sType = StructureType::eAccelerationStructureTrianglesDisplacementMicromapNV; void * pNext = {}; Format displacementBiasAndScaleFormat = Format::eUndefined; Format displacementVectorFormat = Format::eUndefined; DeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer = {}; DeviceSize displacementBiasAndScaleStride = {}; DeviceOrHostAddressConstKHR displacementVectorBuffer = {}; DeviceSize displacementVectorStride = {}; DeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags = {}; DeviceSize displacedMicromapPrimitiveFlagsStride = {}; IndexType indexType = IndexType::eUint16; DeviceOrHostAddressConstKHR indexBuffer = {}; DeviceSize indexStride = {}; uint32_t baseTriangle = {}; uint32_t usageCountsCount = {}; const MicromapUsageEXT * pUsageCounts = {}; const MicromapUsageEXT * const * ppUsageCounts = {}; MicromapEXT micromap = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureTrianglesDisplacementMicromapNV; }; # endif template <> struct CppType { using Type = AccelerationStructureTrianglesDisplacementMicromapNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkAccelerationStructureTrianglesOpacityMicromapEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureTrianglesOpacityMicromapEXT.html 
struct AccelerationStructureTrianglesOpacityMicromapEXT { using NativeType = VkAccelerationStructureTrianglesOpacityMicromapEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT( IndexType indexType_ = IndexType::eUint16, DeviceOrHostAddressConstKHR indexBuffer_ = {}, DeviceSize indexStride_ = {}, uint32_t baseTriangle_ = {}, uint32_t usageCountsCount_ = {}, const MicromapUsageEXT * pUsageCounts_ = {}, const MicromapUsageEXT * const * ppUsageCounts_ = {}, MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indexType{ indexType_ } , indexBuffer{ indexBuffer_ } , indexStride{ indexStride_ } , baseTriangle{ baseTriangle_ } , usageCountsCount{ usageCountsCount_ } , pUsageCounts{ pUsageCounts_ } , ppUsageCounts{ ppUsageCounts_ } , micromap{ micromap_ } { } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureTrianglesOpacityMicromapEXT( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureTrianglesOpacityMicromapEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureTrianglesOpacityMicromapEXT( IndexType indexType_, DeviceOrHostAddressConstKHR indexBuffer_, DeviceSize indexStride_, uint32_t baseTriangle_, ArrayProxyNoTemporaries const & usageCounts_, ArrayProxyNoTemporaries const & pUsageCounts_ = {}, MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) : pNext( pNext_ ) , indexType( indexType_ ) , indexBuffer( indexBuffer_ ) , indexStride( indexStride_ ) , baseTriangle( baseTriangle_ ) , 
usageCountsCount( static_cast( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ) , pUsageCounts( usageCounts_.data() ) , ppUsageCounts( pUsageCounts_.data() ) , micromap( micromap_ ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); # else if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureTrianglesOpacityMicromapEXT::AccelerationStructureTrianglesOpacityMicromapEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( AccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureTrianglesOpacityMicromapEXT & operator=( VkAccelerationStructureTrianglesOpacityMicromapEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) & 
VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setIndexBuffer( DeviceOrHostAddressConstKHR const & indexBuffer_ ) && VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setIndexStride( DeviceSize indexStride_ ) & VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setIndexStride( DeviceSize indexStride_ ) && VULKAN_HPP_NOEXCEPT { indexStride = indexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setBaseTriangle( uint32_t baseTriangle_ ) & VULKAN_HPP_NOEXCEPT { baseTriangle = baseTriangle_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setBaseTriangle( uint32_t baseTriangle_ ) && VULKAN_HPP_NOEXCEPT { baseTriangle = baseTriangle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) & VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setUsageCountsCount( uint32_t usageCountsCount_ ) && VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) && VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
AccelerationStructureTrianglesOpacityMicromapEXT & setUsageCounts( ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( usageCounts_.size() ); pUsageCounts = usageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) && VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AccelerationStructureTrianglesOpacityMicromapEXT & setPUsageCounts( ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( pUsageCounts_.size() ); ppUsageCounts = pUsageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT & setMicromap( MicromapEXT micromap_ ) & VULKAN_HPP_NOEXCEPT { micromap = micromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureTrianglesOpacityMicromapEXT && setMicromap( MicromapEXT micromap_ ) && VULKAN_HPP_NOEXCEPT { micromap = micromap_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureTrianglesOpacityMicromapEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesOpacityMicromapEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesOpacityMicromapEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureTrianglesOpacityMicromapEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, indexType, indexBuffer, indexStride, baseTriangle, usageCountsCount, pUsageCounts, ppUsageCounts, micromap ); } #endif public: StructureType sType = StructureType::eAccelerationStructureTrianglesOpacityMicromapEXT; void * pNext = {}; IndexType indexType = IndexType::eUint16; DeviceOrHostAddressConstKHR indexBuffer = {}; DeviceSize indexStride = {}; uint32_t baseTriangle = {}; uint32_t usageCountsCount = {}; const MicromapUsageEXT * pUsageCounts = {}; const MicromapUsageEXT * const * ppUsageCounts = {}; MicromapEXT micromap = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureTrianglesOpacityMicromapEXT; }; #endif template <> struct CppType { using Type = AccelerationStructureTrianglesOpacityMicromapEXT; }; // wrapper struct for struct VkAccelerationStructureVersionInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAccelerationStructureVersionInfoKHR.html struct AccelerationStructureVersionInfoKHR { using NativeType = VkAccelerationStructureVersionInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pVersionData{ pVersionData_ } { } VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AccelerationStructureVersionInfoKHR( *reinterpret_cast( &rhs ) ) { } AccelerationStructureVersionInfoKHR & operator=( 
AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t * pVersionData_ ) & VULKAN_HPP_NOEXCEPT { pVersionData = pVersionData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR && setPVersionData( const uint8_t * pVersionData_ ) && VULKAN_HPP_NOEXCEPT { pVersionData = pVersionData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAccelerationStructureVersionInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAccelerationStructureVersionInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAccelerationStructureVersionInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pVersionData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AccelerationStructureVersionInfoKHR const & ) const = default; #else bool operator==( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); # endif } bool operator!=( AccelerationStructureVersionInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR; const void * pNext = {}; const uint8_t * pVersionData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AccelerationStructureVersionInfoKHR; }; #endif template <> struct CppType { using Type = AccelerationStructureVersionInfoKHR; }; // wrapper struct for struct VkAcquireNextImageInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireNextImageInfoKHR.html struct AcquireNextImageInfoKHR { using NativeType = VkAcquireNextImageInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, Semaphore semaphore_ = {}, Fence fence_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , swapchain{ swapchain_ } , timeout{ timeout_ } , semaphore{ semaphore_ } , fence{ fence_ } , deviceMask{ deviceMask_ } { } VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AcquireNextImageInfoKHR( *reinterpret_cast( &rhs ) ) { } AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } 
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) & VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setSwapchain( SwapchainKHR swapchain_ ) && VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) & VULKAN_HPP_NOEXCEPT { timeout = timeout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setTimeout( uint64_t timeout_ ) && VULKAN_HPP_NOEXCEPT { timeout = timeout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setSemaphore( Semaphore semaphore_ ) & VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setSemaphore( Semaphore semaphore_ ) && VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setFence( Fence fence_ ) & VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setFence( Fence fence_ ) && VULKAN_HPP_NOEXCEPT { fence = fence_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) & VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR && setDeviceMask( uint32_t deviceMask_ ) && VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAcquireNextImageInfoKHR const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAcquireNextImageInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAcquireNextImageInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, swapchain, timeout, semaphore, fence, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AcquireNextImageInfoKHR const & ) const = default; #else bool operator==( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( timeout == rhs.timeout ) && ( semaphore == rhs.semaphore ) && ( fence == rhs.fence ) && ( deviceMask == rhs.deviceMask ); # endif } bool operator!=( AcquireNextImageInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAcquireNextImageInfoKHR; const void * pNext = {}; SwapchainKHR swapchain = {}; uint64_t timeout = {}; Semaphore semaphore = {}; Fence fence = {}; uint32_t deviceMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AcquireNextImageInfoKHR; }; #endif template <> struct CppType { using Type = AcquireNextImageInfoKHR; }; // wrapper struct for struct VkAcquireProfilingLockInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAcquireProfilingLockInfoKHR.html struct AcquireProfilingLockInfoKHR { using NativeType = VkAcquireProfilingLockInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , timeout{ timeout_ } { } VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AcquireProfilingLockInfoKHR( *reinterpret_cast( &rhs ) ) { } AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setFlags( AcquireProfilingLockFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR && setFlags( AcquireProfilingLockFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) & VULKAN_HPP_NOEXCEPT { timeout = timeout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR && setTimeout( uint64_t timeout_ ) && VULKAN_HPP_NOEXCEPT { timeout = timeout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAcquireProfilingLockInfoKHR const &() 
const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAcquireProfilingLockInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAcquireProfilingLockInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, timeout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AcquireProfilingLockInfoKHR const & ) const = default; #else bool operator==( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( timeout == rhs.timeout ); # endif } bool operator!=( AcquireProfilingLockInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAcquireProfilingLockInfoKHR; const void * pNext = {}; AcquireProfilingLockFlagsKHR flags = {}; uint64_t timeout = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AcquireProfilingLockInfoKHR; }; #endif template <> struct CppType { using Type = AcquireProfilingLockInfoKHR; }; typedef void *( VKAPI_PTR * PFN_AllocationFunction )( void * pUserData, size_t size, size_t alignment, SystemAllocationScope allocationScope ); typedef void *( VKAPI_PTR * PFN_ReallocationFunction )( void * pUserData, void * pOriginal, size_t size, size_t alignment, SystemAllocationScope allocationScope ); typedef void( VKAPI_PTR * PFN_FreeFunction )( void * pUserData, void * pMemory ); typedef void( VKAPI_PTR * PFN_InternalAllocationNotification )( void * pUserData, size_t size, InternalAllocationType allocationType, SystemAllocationScope allocationScope ); typedef 
void( VKAPI_PTR * PFN_InternalFreeNotification )( void * pUserData, size_t size, InternalAllocationType allocationType, SystemAllocationScope allocationScope ); // wrapper struct for struct VkAllocationCallbacks, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAllocationCallbacks.html struct AllocationCallbacks { using NativeType = VkAllocationCallbacks; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, PFN_AllocationFunction pfnAllocation_ = {}, PFN_ReallocationFunction pfnReallocation_ = {}, PFN_FreeFunction pfnFree_ = {}, PFN_InternalAllocationNotification pfnInternalAllocation_ = {}, PFN_InternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT : pUserData{ pUserData_ } , pfnAllocation{ pfnAllocation_ } , pfnReallocation{ pfnReallocation_ } , pfnFree{ pfnFree_ } , pfnInternalAllocation{ pfnInternalAllocation_ } , pfnInternalFree{ pfnInternalFree_ } { } VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( *reinterpret_cast( &rhs ) ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
) AllocationCallbacks( void * pUserData_, PFN_vkAllocationFunction pfnAllocation_, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT : AllocationCallbacks( pUserData_, reinterpret_cast( pfnAllocation_ ), reinterpret_cast( pfnReallocation_ ), reinterpret_cast( pfnFree_ ), reinterpret_cast( pfnInternalAllocation_ ), reinterpret_cast( pfnInternalFree_ ) ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPUserData( void * pUserData_ ) & VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPUserData( void * pUserData_ ) && VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_AllocationFunction pfnAllocation_ ) & VULKAN_HPP_NOEXCEPT { pfnAllocation = pfnAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPfnAllocation( PFN_AllocationFunction pfnAllocation_ ) && VULKAN_HPP_NOEXCEPT { pfnAllocation = pfnAllocation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_ReallocationFunction pfnReallocation_ ) & VULKAN_HPP_NOEXCEPT { pfnReallocation = pfnReallocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPfnReallocation( PFN_ReallocationFunction pfnReallocation_ ) && VULKAN_HPP_NOEXCEPT { pfnReallocation = 
pfnReallocation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_FreeFunction pfnFree_ ) & VULKAN_HPP_NOEXCEPT { pfnFree = pfnFree_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPfnFree( PFN_FreeFunction pfnFree_ ) && VULKAN_HPP_NOEXCEPT { pfnFree = pfnFree_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_InternalAllocationNotification pfnInternalAllocation_ ) & VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPfnInternalAllocation( PFN_InternalAllocationNotification pfnInternalAllocation_ ) && VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_InternalFreeNotification pfnInternalFree_ ) & VULKAN_HPP_NOEXCEPT { pfnInternalFree = pfnInternalFree_; return *this; } VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks && setPfnInternalFree( PFN_InternalFreeNotification pfnInternalFree_ ) && VULKAN_HPP_NOEXCEPT { pfnInternalFree = pfnInternalFree_; return std::move( *this ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT { return setPfnAllocation( reinterpret_cast( pfnAllocation_ ) ); } VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT { return setPfnReallocation( reinterpret_cast( pfnReallocation_ ) ); } VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT { return setPfnInternalAllocation( reinterpret_cast( pfnInternalAllocation_ ) ); } VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT { return setPfnInternalFree( reinterpret_cast( pfnInternalFree_ ) ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAllocationCallbacks const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAllocationCallbacks *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( pUserData, pfnAllocation, pfnReallocation, pfnFree, pfnInternalAllocation, pfnInternalFree ); } #endif bool operator==( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( pUserData == rhs.pUserData ) && ( pfnAllocation == rhs.pfnAllocation ) && ( pfnReallocation == rhs.pfnReallocation ) && ( pfnFree == rhs.pfnFree ) && ( pfnInternalAllocation == rhs.pfnInternalAllocation ) && ( pfnInternalFree == rhs.pfnInternalFree ); #endif } bool 
operator!=( AllocationCallbacks const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: void * pUserData = {}; PFN_AllocationFunction pfnAllocation = {}; PFN_ReallocationFunction pfnReallocation = {}; PFN_FreeFunction pfnFree = {}; PFN_InternalAllocationNotification pfnInternalAllocation = {}; PFN_InternalFreeNotification pfnInternalFree = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AllocationCallbacks; }; #endif // wrapper struct for struct VkAmigoProfilingSubmitInfoSEC, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAmigoProfilingSubmitInfoSEC.html struct AmigoProfilingSubmitInfoSEC { using NativeType = VkAmigoProfilingSubmitInfoSEC; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAmigoProfilingSubmitInfoSEC; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , firstDrawTimestamp{ firstDrawTimestamp_ } , swapBufferTimestamp{ swapBufferTimestamp_ } { } VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; AmigoProfilingSubmitInfoSEC( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT : AmigoProfilingSubmitInfoSEC( *reinterpret_cast( &rhs ) ) { } AmigoProfilingSubmitInfoSEC & operator=( AmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AmigoProfilingSubmitInfoSEC & operator=( VkAmigoProfilingSubmitInfoSEC const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setPNext( const void 
* pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) & VULKAN_HPP_NOEXCEPT { firstDrawTimestamp = firstDrawTimestamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC && setFirstDrawTimestamp( uint64_t firstDrawTimestamp_ ) && VULKAN_HPP_NOEXCEPT { firstDrawTimestamp = firstDrawTimestamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC & setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) & VULKAN_HPP_NOEXCEPT { swapBufferTimestamp = swapBufferTimestamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AmigoProfilingSubmitInfoSEC && setSwapBufferTimestamp( uint64_t swapBufferTimestamp_ ) && VULKAN_HPP_NOEXCEPT { swapBufferTimestamp = swapBufferTimestamp_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAmigoProfilingSubmitInfoSEC const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAmigoProfilingSubmitInfoSEC &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAmigoProfilingSubmitInfoSEC const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAmigoProfilingSubmitInfoSEC *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, firstDrawTimestamp, swapBufferTimestamp ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AmigoProfilingSubmitInfoSEC const & ) const = default; #else bool operator==( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( firstDrawTimestamp == rhs.firstDrawTimestamp ) && ( swapBufferTimestamp == rhs.swapBufferTimestamp ); # endif } bool operator!=( AmigoProfilingSubmitInfoSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAmigoProfilingSubmitInfoSEC; const void * pNext = {}; uint64_t firstDrawTimestamp = {}; uint64_t swapBufferTimestamp = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AmigoProfilingSubmitInfoSEC; }; #endif template <> struct CppType { using Type = AmigoProfilingSubmitInfoSEC; }; // wrapper struct for struct VkComponentMapping, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComponentMapping.html struct ComponentMapping { using NativeType = VkComponentMapping; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentSwizzle r_ = ComponentSwizzle::eIdentity, ComponentSwizzle g_ = ComponentSwizzle::eIdentity, ComponentSwizzle b_ = ComponentSwizzle::eIdentity, ComponentSwizzle a_ = ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT : r{ r_ } , g{ g_ } , b{ b_ } , a{ a_ } { } VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT : ComponentMapping( *reinterpret_cast( &rhs ) ) {} ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setR( ComponentSwizzle r_ ) & VULKAN_HPP_NOEXCEPT { r = r_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping && setR( ComponentSwizzle r_ ) && VULKAN_HPP_NOEXCEPT { 
r = r_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setG( ComponentSwizzle g_ ) & VULKAN_HPP_NOEXCEPT { g = g_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping && setG( ComponentSwizzle g_ ) && VULKAN_HPP_NOEXCEPT { g = g_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setB( ComponentSwizzle b_ ) & VULKAN_HPP_NOEXCEPT { b = b_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping && setB( ComponentSwizzle b_ ) && VULKAN_HPP_NOEXCEPT { b = b_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComponentMapping & setA( ComponentSwizzle a_ ) & VULKAN_HPP_NOEXCEPT { a = a_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComponentMapping && setA( ComponentSwizzle a_ ) && VULKAN_HPP_NOEXCEPT { a = a_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkComponentMapping const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComponentMapping const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkComponentMapping *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( r, g, b, a ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ComponentMapping const & ) const = default; #else bool operator==( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( r == rhs.r ) && ( g == rhs.g ) && ( b == rhs.b ) && ( a == rhs.a ); # endif } bool operator!=( ComponentMapping const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ComponentSwizzle r = ComponentSwizzle::eIdentity; ComponentSwizzle g = ComponentSwizzle::eIdentity; ComponentSwizzle b = ComponentSwizzle::eIdentity; ComponentSwizzle a = 
ComponentSwizzle::eIdentity; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ComponentMapping; }; #endif #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidHardwareBufferFormatProperties2ANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatProperties2ANDROID.html struct AndroidHardwareBufferFormatProperties2ANDROID { using NativeType = VkAndroidHardwareBufferFormatProperties2ANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( Format format_ = Format::eUndefined, uint64_t externalFormat_ = {}, FormatFeatureFlags2 formatFeatures_ = {}, ComponentMapping samplerYcbcrConversionComponents_ = {}, SamplerYcbcrModelConversion suggestedYcbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange suggestedYcbcrRange_ = SamplerYcbcrRange::eItuFull, ChromaLocation suggestedXChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation suggestedYChromaOffset_ = ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , format{ format_ } , externalFormat{ externalFormat_ } , formatFeatures{ formatFeatures_ } , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } , suggestedYcbcrModel{ suggestedYcbcrModel_ } , suggestedYcbcrRange{ suggestedYcbcrRange_ } , suggestedXChromaOffset{ suggestedXChromaOffset_ } , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatProperties2ANDROID( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatProperties2ANDROID( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) 
VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferFormatProperties2ANDROID( *reinterpret_cast( &rhs ) ) { } AndroidHardwareBufferFormatProperties2ANDROID & operator=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferFormatProperties2ANDROID & operator=( VkAndroidHardwareBufferFormatProperties2ANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAndroidHardwareBufferFormatProperties2ANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatProperties2ANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatProperties2ANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatProperties2ANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidHardwareBufferFormatProperties2ANDROID const & ) const = default; # else bool operator==( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( 
suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); # endif } bool operator!=( AndroidHardwareBufferFormatProperties2ANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidHardwareBufferFormatProperties2ANDROID; void * pNext = {}; Format format = Format::eUndefined; uint64_t externalFormat = {}; FormatFeatureFlags2 formatFeatures = {}; ComponentMapping samplerYcbcrConversionComponents = {}; SamplerYcbcrModelConversion suggestedYcbcrModel = SamplerYcbcrModelConversion::eRgbIdentity; SamplerYcbcrRange suggestedYcbcrRange = SamplerYcbcrRange::eItuFull; ChromaLocation suggestedXChromaOffset = ChromaLocation::eCositedEven; ChromaLocation suggestedYChromaOffset = ChromaLocation::eCositedEven; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidHardwareBufferFormatProperties2ANDROID; }; # endif template <> struct CppType { using Type = AndroidHardwareBufferFormatProperties2ANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidHardwareBufferFormatPropertiesANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html struct AndroidHardwareBufferFormatPropertiesANDROID { using NativeType = VkAndroidHardwareBufferFormatPropertiesANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( Format format_ = Format::eUndefined, uint64_t externalFormat_ = {}, FormatFeatureFlags formatFeatures_ = {}, ComponentMapping samplerYcbcrConversionComponents_ = {}, SamplerYcbcrModelConversion 
suggestedYcbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange suggestedYcbcrRange_ = SamplerYcbcrRange::eItuFull, ChromaLocation suggestedXChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation suggestedYChromaOffset_ = ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , format{ format_ } , externalFormat{ externalFormat_ } , formatFeatures{ formatFeatures_ } , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } , suggestedYcbcrModel{ suggestedYcbcrModel_ } , suggestedYcbcrRange{ suggestedYcbcrRange_ } , suggestedXChromaOffset{ suggestedXChromaOffset_ } , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast( &rhs ) ) { } AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAndroidHardwareBufferFormatPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatPropertiesANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple 
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const & ) const = default; # else bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); # endif } bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; void * pNext = {}; Format format = Format::eUndefined; uint64_t externalFormat = {}; FormatFeatureFlags formatFeatures = {}; ComponentMapping samplerYcbcrConversionComponents = {}; SamplerYcbcrModelConversion suggestedYcbcrModel = SamplerYcbcrModelConversion::eRgbIdentity; SamplerYcbcrRange suggestedYcbcrRange = SamplerYcbcrRange::eItuFull; ChromaLocation suggestedXChromaOffset = ChromaLocation::eCositedEven; ChromaLocation suggestedYChromaOffset = ChromaLocation::eCositedEven; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidHardwareBufferFormatPropertiesANDROID; }; # endif template <> struct CppType { using Type = 
AndroidHardwareBufferFormatPropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferFormatResolvePropertiesANDROID.html struct AndroidHardwareBufferFormatResolvePropertiesANDROID { using NativeType = VkAndroidHardwareBufferFormatResolvePropertiesANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID( Format colorAttachmentFormat_ = Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorAttachmentFormat{ colorAttachmentFormat_ } { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferFormatResolvePropertiesANDROID( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferFormatResolvePropertiesANDROID( *reinterpret_cast( &rhs ) ) { } AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferFormatResolvePropertiesANDROID & operator=( VkAndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidHardwareBufferFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, colorAttachmentFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidHardwareBufferFormatResolvePropertiesANDROID const & ) const = default; # else bool operator==( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentFormat == rhs.colorAttachmentFormat ); # endif } bool operator!=( AndroidHardwareBufferFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID; void * pNext = {}; Format colorAttachmentFormat = Format::eUndefined; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; }; # endif template <> struct CppType { using Type = AndroidHardwareBufferFormatResolvePropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidHardwareBufferPropertiesANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferPropertiesANDROID.html struct AndroidHardwareBufferPropertiesANDROID { using NativeType = VkAndroidHardwareBufferPropertiesANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eAndroidHardwareBufferPropertiesANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , allocationSize{ allocationSize_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast( &rhs ) ) { } AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAndroidHardwareBufferPropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferPropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidHardwareBufferPropertiesANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, allocationSize, memoryTypeBits ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidHardwareBufferPropertiesANDROID const & ) const = default; # else bool operator==( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( AndroidHardwareBufferPropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; void * pNext = {}; DeviceSize allocationSize = {}; uint32_t memoryTypeBits = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidHardwareBufferPropertiesANDROID; }; # endif template <> struct CppType { using Type = AndroidHardwareBufferPropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidHardwareBufferUsageANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidHardwareBufferUsageANDROID.html struct AndroidHardwareBufferUsageANDROID { using NativeType = VkAndroidHardwareBufferUsageANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , androidHardwareBufferUsage{ androidHardwareBufferUsage_ } { } VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : AndroidHardwareBufferUsageANDROID( *reinterpret_cast( &rhs ) ) { } AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkAndroidHardwareBufferUsageANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidHardwareBufferUsageANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidHardwareBufferUsageANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, androidHardwareBufferUsage ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidHardwareBufferUsageANDROID const & ) const = default; # else bool operator==( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); # endif } bool operator!=( AndroidHardwareBufferUsageANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; void * pNext = {}; uint64_t androidHardwareBufferUsage = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidHardwareBufferUsageANDROID; }; # endif template <> struct CppType { using Type = AndroidHardwareBufferUsageANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkAndroidSurfaceCreateInfoKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAndroidSurfaceCreateInfoKHR.html struct AndroidSurfaceCreateInfoKHR { using NativeType = VkAndroidSurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , window{ window_ } { } VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : AndroidSurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & setFlags( AndroidSurfaceCreateFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR && setFlags( AndroidSurfaceCreateFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow * window_ ) & VULKAN_HPP_NOEXCEPT { window = window_; return *this; } VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR && setWindow( struct ANativeWindow * window_ ) && VULKAN_HPP_NOEXCEPT { window = window_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAndroidSurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAndroidSurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAndroidSurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, window ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AndroidSurfaceCreateInfoKHR const & ) const = default; # else bool operator==( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( window == rhs.window ); # endif } bool operator!=( AndroidSurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; const void * pNext = {}; AndroidSurfaceCreateFlagsKHR flags = {}; struct ANativeWindow * window = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AndroidSurfaceCreateInfoKHR; }; # endif template <> struct CppType { using Type = AndroidSurfaceCreateInfoKHR; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ // wrapper struct for struct VkAntiLagPresentationInfoAMD, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagPresentationInfoAMD.html struct AntiLagPresentationInfoAMD { using NativeType = VkAntiLagPresentationInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagPresentationInfoAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagStageAMD stage_ = AntiLagStageAMD::eInput, uint64_t frameIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stage{ stage_ } , frameIndex{ frameIndex_ } { } VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagPresentationInfoAMD( *reinterpret_cast( &rhs ) ) { } AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( AntiLagStageAMD stage_ ) & VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD && setStage( AntiLagStageAMD stage_ ) && VULKAN_HPP_NOEXCEPT { stage = stage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t 
frameIndex_ ) & VULKAN_HPP_NOEXCEPT { frameIndex = frameIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD && setFrameIndex( uint64_t frameIndex_ ) && VULKAN_HPP_NOEXCEPT { frameIndex = frameIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAntiLagPresentationInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAntiLagPresentationInfoAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAntiLagPresentationInfoAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stage, frameIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default; #else bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( frameIndex == rhs.frameIndex ); # endif } bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAntiLagPresentationInfoAMD; void * pNext = {}; AntiLagStageAMD stage = AntiLagStageAMD::eInput; uint64_t frameIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AntiLagPresentationInfoAMD; }; #endif template <> struct CppType { using Type = AntiLagPresentationInfoAMD; }; // wrapper struct for struct VkAntiLagDataAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAntiLagDataAMD.html struct AntiLagDataAMD { using NativeType = VkAntiLagDataAMD; static const bool allowDuplicate = false; 
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagDataAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagModeAMD mode_ = AntiLagModeAMD::eDriverControl, uint32_t maxFPS_ = {}, const AntiLagPresentationInfoAMD * pPresentationInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mode{ mode_ } , maxFPS{ maxFPS_ } , pPresentationInfo{ pPresentationInfo_ } { } VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagDataAMD( *reinterpret_cast( &rhs ) ) {} AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( AntiLagModeAMD mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD && setMode( AntiLagModeAMD mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) & VULKAN_HPP_NOEXCEPT { maxFPS = maxFPS_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD && setMaxFPS( uint32_t maxFPS_ ) && VULKAN_HPP_NOEXCEPT { maxFPS = maxFPS_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPPresentationInfo( const AntiLagPresentationInfoAMD * 
pPresentationInfo_ ) & VULKAN_HPP_NOEXCEPT { pPresentationInfo = pPresentationInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD && setPPresentationInfo( const AntiLagPresentationInfoAMD * pPresentationInfo_ ) && VULKAN_HPP_NOEXCEPT { pPresentationInfo = pPresentationInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAntiLagDataAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAntiLagDataAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AntiLagDataAMD const & ) const = default; #else bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( maxFPS == rhs.maxFPS ) && ( pPresentationInfo == rhs.pPresentationInfo ); # endif } bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAntiLagDataAMD; const void * pNext = {}; AntiLagModeAMD mode = AntiLagModeAMD::eDriverControl; uint32_t maxFPS = {}; const AntiLagPresentationInfoAMD * pPresentationInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AntiLagDataAMD; }; #endif template <> struct CppType { using Type = AntiLagDataAMD; }; // wrapper struct for struct VkApplicationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkApplicationInfo.html struct ApplicationInfo { using 
NativeType = VkApplicationInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ApplicationInfo( const char * pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char * pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pApplicationName{ pApplicationName_ } , applicationVersion{ applicationVersion_ } , pEngineName{ pEngineName_ } , engineVersion{ engineVersion_ } , apiVersion{ apiVersion_ } { } VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ApplicationInfo( *reinterpret_cast( &rhs ) ) {} ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPApplicationName( const char * pApplicationName_ ) & VULKAN_HPP_NOEXCEPT { pApplicationName = pApplicationName_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setPApplicationName( const char * pApplicationName_ ) && VULKAN_HPP_NOEXCEPT { pApplicationName = pApplicationName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApplicationVersion( uint32_t 
applicationVersion_ ) & VULKAN_HPP_NOEXCEPT { applicationVersion = applicationVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setApplicationVersion( uint32_t applicationVersion_ ) && VULKAN_HPP_NOEXCEPT { applicationVersion = applicationVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setPEngineName( const char * pEngineName_ ) & VULKAN_HPP_NOEXCEPT { pEngineName = pEngineName_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setPEngineName( const char * pEngineName_ ) && VULKAN_HPP_NOEXCEPT { pEngineName = pEngineName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) & VULKAN_HPP_NOEXCEPT { engineVersion = engineVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setEngineVersion( uint32_t engineVersion_ ) && VULKAN_HPP_NOEXCEPT { engineVersion = engineVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) & VULKAN_HPP_NOEXCEPT { apiVersion = apiVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 ApplicationInfo && setApiVersion( uint32_t apiVersion_ ) && VULKAN_HPP_NOEXCEPT { apiVersion = apiVersion_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkApplicationInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkApplicationInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkApplicationInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pApplicationName, applicationVersion, pEngineName, engineVersion, apiVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( ApplicationInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( pApplicationName != rhs.pApplicationName ) if ( auto cmp = strcmp( pApplicationName, rhs.pApplicationName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = applicationVersion <=> rhs.applicationVersion; cmp != 0 ) return cmp; if ( pEngineName != rhs.pEngineName ) if ( auto cmp = strcmp( pEngineName, rhs.pEngineName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = engineVersion <=> rhs.engineVersion; cmp != 0 ) return cmp; if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pApplicationName == rhs.pApplicationName ) || ( strcmp( pApplicationName, rhs.pApplicationName ) == 0 ) ) && ( applicationVersion == rhs.applicationVersion ) && ( ( pEngineName == rhs.pEngineName ) || ( strcmp( pEngineName, rhs.pEngineName ) == 0 ) ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); } bool operator!=( ApplicationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eApplicationInfo; const void * pNext = {}; const char * pApplicationName = {}; uint32_t applicationVersion = {}; const char * pEngineName = {}; uint32_t engineVersion = {}; uint32_t apiVersion = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ApplicationInfo; }; #endif template <> struct CppType { using Type = ApplicationInfo; }; // wrapper struct for struct VkAttachmentDescription, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription.html struct AttachmentDescription { using NativeType = 
VkAttachmentDescription; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescriptionFlags flags_ = {}, Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : flags{ flags_ } , format{ format_ } , samples{ samples_ } , loadOp{ loadOp_ } , storeOp{ storeOp_ } , stencilLoadOp{ stencilLoadOp_ } , stencilStoreOp{ stencilStoreOp_ } , initialLayout{ initialLayout_ } , finalLayout{ finalLayout_ } { } VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentDescription( *reinterpret_cast( &rhs ) ) { } AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFlags( AttachmentDescriptionFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setFlags( AttachmentDescriptionFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 
AttachmentDescription && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setSamples( SampleCountFlagBits samples_ ) & VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setSamples( SampleCountFlagBits samples_ ) && VULKAN_HPP_NOEXCEPT { samples = samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setLoadOp( AttachmentLoadOp loadOp_ ) & VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setLoadOp( AttachmentLoadOp loadOp_ ) && VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStoreOp( AttachmentStoreOp storeOp_ ) & VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setStoreOp( AttachmentStoreOp storeOp_ ) && VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) & VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) && VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) & VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) && VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setInitialLayout( ImageLayout initialLayout_ ) & VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription 
&& setInitialLayout( ImageLayout initialLayout_ ) && VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & setFinalLayout( ImageLayout finalLayout_ ) & VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription && setFinalLayout( ImageLayout finalLayout_ ) && VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescription const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentDescription *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentDescription const & ) const = default; #else bool operator==( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); # endif } bool operator!=( AttachmentDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: AttachmentDescriptionFlags flags = {}; Format format = Format::eUndefined; SampleCountFlagBits samples = SampleCountFlagBits::e1; 
AttachmentLoadOp loadOp = AttachmentLoadOp::eLoad; AttachmentStoreOp storeOp = AttachmentStoreOp::eStore; AttachmentLoadOp stencilLoadOp = AttachmentLoadOp::eLoad; AttachmentStoreOp stencilStoreOp = AttachmentStoreOp::eStore; ImageLayout initialLayout = ImageLayout::eUndefined; ImageLayout finalLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentDescription; }; #endif // wrapper struct for struct VkAttachmentDescription2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescription2.html struct AttachmentDescription2 { using NativeType = VkAttachmentDescription2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescriptionFlags flags_ = {}, Format format_ = Format::eUndefined, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, AttachmentLoadOp loadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp storeOp_ = AttachmentStoreOp::eStore, AttachmentLoadOp stencilLoadOp_ = AttachmentLoadOp::eLoad, AttachmentStoreOp stencilStoreOp_ = AttachmentStoreOp::eStore, ImageLayout initialLayout_ = ImageLayout::eUndefined, ImageLayout finalLayout_ = ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , format{ format_ } , samples{ samples_ } , loadOp{ loadOp_ } , storeOp{ storeOp_ } , stencilLoadOp{ stencilLoadOp_ } , stencilStoreOp{ stencilStoreOp_ } , initialLayout{ initialLayout_ } , finalLayout{ finalLayout_ } { } VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentDescription2( *reinterpret_cast( &rhs ) ) { } 
AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFlags( AttachmentDescriptionFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setFlags( AttachmentDescriptionFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setSamples( SampleCountFlagBits samples_ ) & VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setSamples( SampleCountFlagBits samples_ ) && VULKAN_HPP_NOEXCEPT { samples = samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setLoadOp( AttachmentLoadOp loadOp_ ) & VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setLoadOp( AttachmentLoadOp loadOp_ ) && VULKAN_HPP_NOEXCEPT { loadOp = loadOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStoreOp( AttachmentStoreOp storeOp_ ) & VULKAN_HPP_NOEXCEPT 
{ storeOp = storeOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setStoreOp( AttachmentStoreOp storeOp_ ) && VULKAN_HPP_NOEXCEPT { storeOp = storeOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) & VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setStencilLoadOp( AttachmentLoadOp stencilLoadOp_ ) && VULKAN_HPP_NOEXCEPT { stencilLoadOp = stencilLoadOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) & VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setStencilStoreOp( AttachmentStoreOp stencilStoreOp_ ) && VULKAN_HPP_NOEXCEPT { stencilStoreOp = stencilStoreOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setInitialLayout( ImageLayout initialLayout_ ) & VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setInitialLayout( ImageLayout initialLayout_ ) && VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & setFinalLayout( ImageLayout finalLayout_ ) & VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 && setFinalLayout( ImageLayout finalLayout_ ) && VULKAN_HPP_NOEXCEPT { finalLayout = finalLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescription2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescription2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentDescription2 
*() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, format, samples, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentDescription2 const & ) const = default; #else bool operator==( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( format == rhs.format ) && ( samples == rhs.samples ) && ( loadOp == rhs.loadOp ) && ( storeOp == rhs.storeOp ) && ( stencilLoadOp == rhs.stencilLoadOp ) && ( stencilStoreOp == rhs.stencilStoreOp ) && ( initialLayout == rhs.initialLayout ) && ( finalLayout == rhs.finalLayout ); # endif } bool operator!=( AttachmentDescription2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAttachmentDescription2; const void * pNext = {}; AttachmentDescriptionFlags flags = {}; Format format = Format::eUndefined; SampleCountFlagBits samples = SampleCountFlagBits::e1; AttachmentLoadOp loadOp = AttachmentLoadOp::eLoad; AttachmentStoreOp storeOp = AttachmentStoreOp::eStore; AttachmentLoadOp stencilLoadOp = AttachmentLoadOp::eLoad; AttachmentStoreOp stencilStoreOp = AttachmentStoreOp::eStore; ImageLayout initialLayout = ImageLayout::eUndefined; ImageLayout finalLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentDescription2; }; #endif template <> struct CppType { using Type = AttachmentDescription2; }; using AttachmentDescription2KHR = AttachmentDescription2; // wrapper struct for struct VkAttachmentDescriptionStencilLayout, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentDescriptionStencilLayout.html struct AttachmentDescriptionStencilLayout { using NativeType = VkAttachmentDescriptionStencilLayout; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( ImageLayout stencilInitialLayout_ = ImageLayout::eUndefined, ImageLayout stencilFinalLayout_ = ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stencilInitialLayout{ stencilInitialLayout_ } , stencilFinalLayout{ stencilFinalLayout_ } { } VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentDescriptionStencilLayout( *reinterpret_cast( &rhs ) ) { } AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilInitialLayout( ImageLayout stencilInitialLayout_ ) & VULKAN_HPP_NOEXCEPT { stencilInitialLayout = 
stencilInitialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout && setStencilInitialLayout( ImageLayout stencilInitialLayout_ ) && VULKAN_HPP_NOEXCEPT { stencilInitialLayout = stencilInitialLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & setStencilFinalLayout( ImageLayout stencilFinalLayout_ ) & VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout && setStencilFinalLayout( ImageLayout stencilFinalLayout_ ) && VULKAN_HPP_NOEXCEPT { stencilFinalLayout = stencilFinalLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentDescriptionStencilLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentDescriptionStencilLayout const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentDescriptionStencilLayout *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stencilInitialLayout, stencilFinalLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentDescriptionStencilLayout const & ) const = default; #else bool operator==( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilInitialLayout == rhs.stencilInitialLayout ) && ( stencilFinalLayout == rhs.stencilFinalLayout ); # endif } bool operator!=( AttachmentDescriptionStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eAttachmentDescriptionStencilLayout; void * pNext = {}; ImageLayout stencilInitialLayout = ImageLayout::eUndefined; ImageLayout stencilFinalLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentDescriptionStencilLayout; }; #endif template <> struct CppType { using Type = AttachmentDescriptionStencilLayout; }; using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout; // wrapper struct for struct VkAttachmentFeedbackLoopInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentFeedbackLoopInfoEXT.html struct AttachmentFeedbackLoopInfoEXT { using NativeType = VkAttachmentFeedbackLoopInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentFeedbackLoopInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT( Bool32 feedbackLoopEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , feedbackLoopEnable{ feedbackLoopEnable_ } { } VULKAN_HPP_CONSTEXPR AttachmentFeedbackLoopInfoEXT( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentFeedbackLoopInfoEXT( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentFeedbackLoopInfoEXT( *reinterpret_cast( &rhs ) ) { } AttachmentFeedbackLoopInfoEXT & operator=( AttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentFeedbackLoopInfoEXT & operator=( VkAttachmentFeedbackLoopInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT & setFeedbackLoopEnable( Bool32 feedbackLoopEnable_ ) & VULKAN_HPP_NOEXCEPT { feedbackLoopEnable = feedbackLoopEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentFeedbackLoopInfoEXT && setFeedbackLoopEnable( Bool32 feedbackLoopEnable_ ) && VULKAN_HPP_NOEXCEPT { feedbackLoopEnable = feedbackLoopEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentFeedbackLoopInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentFeedbackLoopInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentFeedbackLoopInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentFeedbackLoopInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, feedbackLoopEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentFeedbackLoopInfoEXT const & ) const = default; #else bool operator==( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( feedbackLoopEnable == rhs.feedbackLoopEnable ); # endif } bool operator!=( AttachmentFeedbackLoopInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAttachmentFeedbackLoopInfoEXT; const void * pNext = {}; Bool32 feedbackLoopEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentFeedbackLoopInfoEXT; }; #endif template 
<> struct CppType { using Type = AttachmentFeedbackLoopInfoEXT; }; // wrapper struct for struct VkAttachmentReference, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference.html struct AttachmentReference { using NativeType = VkAttachmentReference; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, ImageLayout layout_ = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : attachment{ attachment_ } , layout{ layout_ } { } VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference( *reinterpret_cast( &rhs ) ) { } AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setAttachment( uint32_t attachment_ ) & VULKAN_HPP_NOEXCEPT { attachment = attachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference && setAttachment( uint32_t attachment_ ) && VULKAN_HPP_NOEXCEPT { attachment = attachment_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentReference & setLayout( ImageLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference && setLayout( ImageLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReference const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkAttachmentReference const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentReference *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( attachment, layout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentReference const & ) const = default; #else bool operator==( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( attachment == rhs.attachment ) && ( layout == rhs.layout ); # endif } bool operator!=( AttachmentReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t attachment = {}; ImageLayout layout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentReference; }; #endif // wrapper struct for struct VkAttachmentReference2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReference2.html struct AttachmentReference2 { using NativeType = VkAttachmentReference2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReference2( uint32_t attachment_ = {}, ImageLayout layout_ = ImageLayout::eUndefined, ImageAspectFlags aspectMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , attachment{ attachment_ } , layout{ layout_ } , aspectMask{ aspectMask_ } { } VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReference2( *reinterpret_cast( &rhs ) ) { 
} AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAttachment( uint32_t attachment_ ) & VULKAN_HPP_NOEXCEPT { attachment = attachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 && setAttachment( uint32_t attachment_ ) && VULKAN_HPP_NOEXCEPT { attachment = attachment_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setLayout( ImageLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 && setLayout( ImageLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReference2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentReference2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentReference2 *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, attachment, layout, aspectMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentReference2 const & ) const = default; #else bool operator==( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachment == rhs.attachment ) && ( layout == rhs.layout ) && ( aspectMask == rhs.aspectMask ); # endif } bool operator!=( AttachmentReference2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAttachmentReference2; const void * pNext = {}; uint32_t attachment = {}; ImageLayout layout = ImageLayout::eUndefined; ImageAspectFlags aspectMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentReference2; }; #endif template <> struct CppType { using Type = AttachmentReference2; }; using AttachmentReference2KHR = AttachmentReference2; // wrapper struct for struct VkAttachmentReferenceStencilLayout, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentReferenceStencilLayout.html struct AttachmentReferenceStencilLayout { using NativeType = VkAttachmentReferenceStencilLayout; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( ImageLayout stencilLayout_ = ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stencilLayout{ stencilLayout_ } { } VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( 
AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentReferenceStencilLayout( *reinterpret_cast( &rhs ) ) { } AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & setStencilLayout( ImageLayout stencilLayout_ ) & VULKAN_HPP_NOEXCEPT { stencilLayout = stencilLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout && setStencilLayout( ImageLayout stencilLayout_ ) && VULKAN_HPP_NOEXCEPT { stencilLayout = stencilLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentReferenceStencilLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentReferenceStencilLayout const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentReferenceStencilLayout *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stencilLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR 
) auto operator<=>( AttachmentReferenceStencilLayout const & ) const = default; #else bool operator==( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilLayout == rhs.stencilLayout ); # endif } bool operator!=( AttachmentReferenceStencilLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAttachmentReferenceStencilLayout; void * pNext = {}; ImageLayout stencilLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentReferenceStencilLayout; }; #endif template <> struct CppType { using Type = AttachmentReferenceStencilLayout; }; using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout; // wrapper struct for struct VkAttachmentSampleCountInfoAMD, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleCountInfoAMD.html struct AttachmentSampleCountInfoAMD { using NativeType = VkAttachmentSampleCountInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentSampleCountInfoAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( uint32_t colorAttachmentCount_ = {}, const SampleCountFlagBits * pColorAttachmentSamples_ = {}, SampleCountFlagBits depthStencilAttachmentSamples_ = SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorAttachmentCount{ colorAttachmentCount_ } , pColorAttachmentSamples{ pColorAttachmentSamples_ } , depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ } { } VULKAN_HPP_CONSTEXPR AttachmentSampleCountInfoAMD( AttachmentSampleCountInfoAMD const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; AttachmentSampleCountInfoAMD( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentSampleCountInfoAMD( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AttachmentSampleCountInfoAMD( ArrayProxyNoTemporaries const & colorAttachmentSamples_, SampleCountFlagBits depthStencilAttachmentSamples_ = SampleCountFlagBits::e1, const void * pNext_ = nullptr ) : pNext( pNext_ ) , colorAttachmentCount( static_cast( colorAttachmentSamples_.size() ) ) , pColorAttachmentSamples( colorAttachmentSamples_.data() ) , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ AttachmentSampleCountInfoAMD & operator=( AttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentSampleCountInfoAMD & operator=( VkAttachmentSampleCountInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) & VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD && setColorAttachmentCount( uint32_t colorAttachmentCount_ ) && VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setPColorAttachmentSamples( const SampleCountFlagBits * pColorAttachmentSamples_ ) & VULKAN_HPP_NOEXCEPT { pColorAttachmentSamples = 
pColorAttachmentSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD && setPColorAttachmentSamples( const SampleCountFlagBits * pColorAttachmentSamples_ ) && VULKAN_HPP_NOEXCEPT { pColorAttachmentSamples = pColorAttachmentSamples_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) AttachmentSampleCountInfoAMD & setColorAttachmentSamples( ArrayProxyNoTemporaries const & colorAttachmentSamples_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentSamples_.size() ); pColorAttachmentSamples = colorAttachmentSamples_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD & setDepthStencilAttachmentSamples( SampleCountFlagBits depthStencilAttachmentSamples_ ) & VULKAN_HPP_NOEXCEPT { depthStencilAttachmentSamples = depthStencilAttachmentSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleCountInfoAMD && setDepthStencilAttachmentSamples( SampleCountFlagBits depthStencilAttachmentSamples_ ) && VULKAN_HPP_NOEXCEPT { depthStencilAttachmentSamples = depthStencilAttachmentSamples_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentSampleCountInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentSampleCountInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentSampleCountInfoAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentSampleCountInfoAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, colorAttachmentCount, pColorAttachmentSamples, depthStencilAttachmentSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentSampleCountInfoAMD const & ) const = default; #else bool operator==( 
AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentSamples == rhs.pColorAttachmentSamples ) && ( depthStencilAttachmentSamples == rhs.depthStencilAttachmentSamples ); # endif } bool operator!=( AttachmentSampleCountInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eAttachmentSampleCountInfoAMD; const void * pNext = {}; uint32_t colorAttachmentCount = {}; const SampleCountFlagBits * pColorAttachmentSamples = {}; SampleCountFlagBits depthStencilAttachmentSamples = SampleCountFlagBits::e1; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentSampleCountInfoAMD; }; #endif template <> struct CppType { using Type = AttachmentSampleCountInfoAMD; }; using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; // wrapper struct for struct VkExtent2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent2D.html struct Extent2D { using NativeType = VkExtent2D; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT : width{ width_ } , height{ height_ } { } VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent2D( *reinterpret_cast( &rhs ) ) {} Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
Extent2D & setWidth( uint32_t width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent2D && setWidth( uint32_t width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Extent2D & setHeight( uint32_t height_ ) & VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent2D && setHeight( uint32_t height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExtent2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExtent2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExtent2D const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExtent2D *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( width, height ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Extent2D const & ) const = default; #else bool operator==( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( width == rhs.width ) && ( height == rhs.height ); # endif } bool operator!=( Extent2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t width = {}; uint32_t height = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Extent2D; }; #endif // wrapper struct for struct VkSampleLocationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationEXT.html struct SampleLocationEXT { using NativeType = VkSampleLocationEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } { } 
VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationEXT( *reinterpret_cast( &rhs ) ) {} SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setX( float x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT && setX( float x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & setY( float y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT && setY( float y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSampleLocationEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSampleLocationEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSampleLocationEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( x, y ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SampleLocationEXT const & ) const = default; #else bool operator==( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ); # endif } bool operator!=( SampleLocationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs ); } #endif public: float x = {}; float y = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SampleLocationEXT; }; #endif // wrapper struct for struct VkSampleLocationsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSampleLocationsInfoEXT.html struct SampleLocationsInfoEXT { using NativeType = VkSampleLocationsInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_ = SampleCountFlagBits::e1, Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const SampleLocationEXT * pSampleLocations_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , sampleLocationsPerPixel{ sampleLocationsPerPixel_ } , sampleLocationGridSize{ sampleLocationGridSize_ } , sampleLocationsCount{ sampleLocationsCount_ } , pSampleLocations{ pSampleLocations_ } { } VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : SampleLocationsInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SampleLocationsInfoEXT( SampleCountFlagBits sampleLocationsPerPixel_, Extent2D sampleLocationGridSize_, ArrayProxyNoTemporaries const & sampleLocations_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) , sampleLocationGridSize( sampleLocationGridSize_ ) , sampleLocationsCount( static_cast( sampleLocations_.size() ) ) , pSampleLocations( sampleLocations_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs 
) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ ) & VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT && setSampleLocationsPerPixel( SampleCountFlagBits sampleLocationsPerPixel_ ) && VULKAN_HPP_NOEXCEPT { sampleLocationsPerPixel = sampleLocationsPerPixel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationGridSize( Extent2D const & sampleLocationGridSize_ ) & VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT && setSampleLocationGridSize( Extent2D const & sampleLocationGridSize_ ) && VULKAN_HPP_NOEXCEPT { sampleLocationGridSize = sampleLocationGridSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) & VULKAN_HPP_NOEXCEPT { sampleLocationsCount = sampleLocationsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT && setSampleLocationsCount( uint32_t sampleLocationsCount_ ) && VULKAN_HPP_NOEXCEPT { sampleLocationsCount = sampleLocationsCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & setPSampleLocations( const SampleLocationEXT * pSampleLocations_ ) & 
VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT && setPSampleLocations( const SampleLocationEXT * pSampleLocations_ ) && VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SampleLocationsInfoEXT & setSampleLocations( ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT { sampleLocationsCount = static_cast( sampleLocations_.size() ); pSampleLocations = sampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSampleLocationsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSampleLocationsInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSampleLocationsInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, sampleLocationsPerPixel, sampleLocationGridSize, sampleLocationsCount, pSampleLocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SampleLocationsInfoEXT const & ) const = default; #else bool operator==( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) && ( sampleLocationsCount == rhs.sampleLocationsCount ) && ( pSampleLocations == rhs.pSampleLocations ); # endif } bool operator!=( SampleLocationsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: 
StructureType sType = StructureType::eSampleLocationsInfoEXT; const void * pNext = {}; SampleCountFlagBits sampleLocationsPerPixel = SampleCountFlagBits::e1; Extent2D sampleLocationGridSize = {}; uint32_t sampleLocationsCount = {}; const SampleLocationEXT * pSampleLocations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SampleLocationsInfoEXT; }; #endif template <> struct CppType { using Type = SampleLocationsInfoEXT; }; // wrapper struct for struct VkAttachmentSampleLocationsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkAttachmentSampleLocationsEXT.html struct AttachmentSampleLocationsEXT { using NativeType = VkAttachmentSampleLocationsEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT : attachmentIndex{ attachmentIndex_ } , sampleLocationsInfo{ sampleLocationsInfo_ } { } VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : AttachmentSampleLocationsEXT( *reinterpret_cast( &rhs ) ) { } AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) & VULKAN_HPP_NOEXCEPT { attachmentIndex = attachmentIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT && setAttachmentIndex( uint32_t 
attachmentIndex_ ) && VULKAN_HPP_NOEXCEPT { attachmentIndex = attachmentIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & setSampleLocationsInfo( SampleLocationsInfoEXT const & sampleLocationsInfo_ ) & VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT && setSampleLocationsInfo( SampleLocationsInfoEXT const & sampleLocationsInfo_ ) && VULKAN_HPP_NOEXCEPT { sampleLocationsInfo = sampleLocationsInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkAttachmentSampleLocationsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkAttachmentSampleLocationsEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkAttachmentSampleLocationsEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( attachmentIndex, sampleLocationsInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( AttachmentSampleLocationsEXT const & ) const = default; #else bool operator==( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( attachmentIndex == rhs.attachmentIndex ) && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); # endif } bool operator!=( AttachmentSampleLocationsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t attachmentIndex = {}; SampleLocationsInfoEXT sampleLocationsInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = AttachmentSampleLocationsEXT; }; #endif // wrapper struct for struct VkBaseInStructure, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseInStructure.html struct BaseInStructure { using NativeType = VkBaseInStructure; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseInStructure( StructureType sType_ = StructureType::eApplicationInfo, const struct BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : sType{ sType_ } , pNext{ pNext_ } { } BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseInStructure( *reinterpret_cast( &rhs ) ) {} BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BaseInStructure & setPNext( const struct BaseInStructure * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BaseInStructure && setPNext( const struct BaseInStructure * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBaseInStructure const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBaseInStructure const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBaseInStructure *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BaseInStructure const & ) const = default; #else bool operator==( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( BaseInStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eApplicationInfo; const struct BaseInStructure * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BaseInStructure; }; #endif // wrapper struct for struct VkBaseOutStructure, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBaseOutStructure.html struct BaseOutStructure { using NativeType = VkBaseOutStructure; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseOutStructure( StructureType sType_ = StructureType::eApplicationInfo, struct BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : sType{ sType_ } , pNext{ pNext_ } { } BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT : BaseOutStructure( *reinterpret_cast( &rhs ) ) {} BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BaseOutStructure & setPNext( struct BaseOutStructure * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BaseOutStructure && setPNext( struct BaseOutStructure * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBaseOutStructure const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkBaseOutStructure const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBaseOutStructure *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BaseOutStructure const & ) const = default; #else bool operator==( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( BaseOutStructure const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eApplicationInfo; struct BaseOutStructure * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BaseOutStructure; }; #endif // wrapper struct for struct VkBeginCustomResolveInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBeginCustomResolveInfoEXT.html struct BeginCustomResolveInfoEXT { using NativeType = VkBeginCustomResolveInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBeginCustomResolveInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BeginCustomResolveInfoEXT( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR BeginCustomResolveInfoEXT( BeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BeginCustomResolveInfoEXT( VkBeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BeginCustomResolveInfoEXT( *reinterpret_cast( &rhs ) ) { } BeginCustomResolveInfoEXT & operator=( BeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ BeginCustomResolveInfoEXT & operator=( VkBeginCustomResolveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BeginCustomResolveInfoEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BeginCustomResolveInfoEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBeginCustomResolveInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBeginCustomResolveInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBeginCustomResolveInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBeginCustomResolveInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BeginCustomResolveInfoEXT const & ) const = default; #else bool operator==( BeginCustomResolveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( BeginCustomResolveInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBeginCustomResolveInfoEXT; void * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BeginCustomResolveInfoEXT; }; #endif template <> struct CppType { using Type = BeginCustomResolveInfoEXT; }; // wrapper struct for struct VkBindAccelerationStructureMemoryInfoNV, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindAccelerationStructureMemoryInfoNV.html struct BindAccelerationStructureMemoryInfoNV { using NativeType = VkBindAccelerationStructureMemoryInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV accelerationStructure_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , accelerationStructure{ accelerationStructure_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } , deviceIndexCount{ deviceIndexCount_ } , pDeviceIndices{ pDeviceIndices_ } { } VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindAccelerationStructureMemoryInfoNV( AccelerationStructureNV accelerationStructure_, DeviceMemory memory_, DeviceSize memoryOffset_, ArrayProxyNoTemporaries const & deviceIndices_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , accelerationStructure( accelerationStructure_ ) , memory( memory_ ) , memoryOffset( memoryOffset_ ) , deviceIndexCount( static_cast( deviceIndices_.size() ) ) , pDeviceIndices( deviceIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV && setAccelerationStructure( AccelerationStructureNV accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) & VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV 
&& setDeviceIndexCount( uint32_t deviceIndexCount_ ) && VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) & VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV && setPDeviceIndices( const uint32_t * pDeviceIndices_ ) && VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindAccelerationStructureMemoryInfoNV & setDeviceIndices( ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast( deviceIndices_.size() ); pDeviceIndices = deviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindAccelerationStructureMemoryInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindAccelerationStructureMemoryInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindAccelerationStructureMemoryInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, accelerationStructure, memory, memoryOffset, deviceIndexCount, pDeviceIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindAccelerationStructureMemoryInfoNV const & ) const = default; #else bool operator==( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == 
rhs.accelerationStructure ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); # endif } bool operator!=( BindAccelerationStructureMemoryInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV; const void * pNext = {}; AccelerationStructureNV accelerationStructure = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; uint32_t deviceIndexCount = {}; const uint32_t * pDeviceIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindAccelerationStructureMemoryInfoNV; }; #endif template <> struct CppType { using Type = BindAccelerationStructureMemoryInfoNV; }; // wrapper struct for struct VkBindBufferMemoryDeviceGroupInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryDeviceGroupInfo.html struct BindBufferMemoryDeviceGroupInfo { using NativeType = VkBindBufferMemoryDeviceGroupInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceIndexCount{ deviceIndexCount_ } , pDeviceIndices{ pDeviceIndices_ } { } VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindBufferMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) 
BindBufferMemoryDeviceGroupInfo( ArrayProxyNoTemporaries const & deviceIndices_, const void * pNext_ = nullptr ) : pNext( pNext_ ), deviceIndexCount( static_cast( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) & VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo && setDeviceIndexCount( uint32_t deviceIndexCount_ ) && VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) & VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo && setPDeviceIndices( const uint32_t * pDeviceIndices_ ) && VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindBufferMemoryDeviceGroupInfo & setDeviceIndices( ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast( deviceIndices_.size() ); 
pDeviceIndices = deviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindBufferMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindBufferMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindBufferMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindBufferMemoryDeviceGroupInfo const & ) const = default; #else bool operator==( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ); # endif } bool operator!=( BindBufferMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo; const void * pNext = {}; uint32_t deviceIndexCount = {}; const uint32_t * pDeviceIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindBufferMemoryDeviceGroupInfo; }; #endif template <> struct CppType { using Type = BindBufferMemoryDeviceGroupInfo; }; using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; // wrapper struct for struct VkBindBufferMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindBufferMemoryInfo.html struct BindBufferMemoryInfo { using NativeType = VkBindBufferMemoryInfo; 
static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( Buffer buffer_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } { } VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindBufferMemoryInfo( *reinterpret_cast( &rhs ) ) { } BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
BindBufferMemoryInfo & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindBufferMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindBufferMemoryInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindBufferMemoryInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer, memory, memoryOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindBufferMemoryInfo const & ) const = default; #else bool operator==( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); # endif } bool operator!=( BindBufferMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindBufferMemoryInfo; const void * pNext = {}; Buffer buffer = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindBufferMemoryInfo; }; #endif template <> struct CppType { using Type = BindBufferMemoryInfo; }; using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; // wrapper struct for struct VkBindDataGraphPipelineSessionMemoryInfoARM, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDataGraphPipelineSessionMemoryInfoARM.html struct BindDataGraphPipelineSessionMemoryInfoARM { using NativeType = VkBindDataGraphPipelineSessionMemoryInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDataGraphPipelineSessionMemoryInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindDataGraphPipelineSessionMemoryInfoARM( DataGraphPipelineSessionARM session_ = {}, DataGraphPipelineSessionBindPointARM bindPoint_ = DataGraphPipelineSessionBindPointARM::eTransient, uint32_t objectIndex_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , session{ session_ } , bindPoint{ bindPoint_ } , objectIndex{ objectIndex_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } { } VULKAN_HPP_CONSTEXPR BindDataGraphPipelineSessionMemoryInfoARM( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindDataGraphPipelineSessionMemoryInfoARM( VkBindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : BindDataGraphPipelineSessionMemoryInfoARM( *reinterpret_cast( &rhs ) ) { } BindDataGraphPipelineSessionMemoryInfoARM & operator=( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindDataGraphPipelineSessionMemoryInfoARM & operator=( VkBindDataGraphPipelineSessionMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM 
&& setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setSession( DataGraphPipelineSessionARM session_ ) & VULKAN_HPP_NOEXCEPT { session = session_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM && setSession( DataGraphPipelineSessionARM session_ ) && VULKAN_HPP_NOEXCEPT { session = session_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) & VULKAN_HPP_NOEXCEPT { bindPoint = bindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM && setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) && VULKAN_HPP_NOEXCEPT { bindPoint = bindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setObjectIndex( uint32_t objectIndex_ ) & VULKAN_HPP_NOEXCEPT { objectIndex = objectIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM && setObjectIndex( uint32_t objectIndex_ ) && VULKAN_HPP_NOEXCEPT { objectIndex = objectIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDataGraphPipelineSessionMemoryInfoARM && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ 
operator VkBindDataGraphPipelineSessionMemoryInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDataGraphPipelineSessionMemoryInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDataGraphPipelineSessionMemoryInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindDataGraphPipelineSessionMemoryInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, session, bindPoint, objectIndex, memory, memoryOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindDataGraphPipelineSessionMemoryInfoARM const & ) const = default; #else bool operator==( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session ) && ( bindPoint == rhs.bindPoint ) && ( objectIndex == rhs.objectIndex ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); # endif } bool operator!=( BindDataGraphPipelineSessionMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindDataGraphPipelineSessionMemoryInfoARM; const void * pNext = {}; DataGraphPipelineSessionARM session = {}; DataGraphPipelineSessionBindPointARM bindPoint = DataGraphPipelineSessionBindPointARM::eTransient; uint32_t objectIndex = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindDataGraphPipelineSessionMemoryInfoARM; }; #endif template <> struct CppType { using Type = BindDataGraphPipelineSessionMemoryInfoARM; }; // wrapper struct for struct 
VkBindDescriptorBufferEmbeddedSamplersInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorBufferEmbeddedSamplersInfoEXT.html struct BindDescriptorBufferEmbeddedSamplersInfoEXT { using NativeType = VkBindDescriptorBufferEmbeddedSamplersInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( ShaderStageFlags stageFlags_ = {}, PipelineLayout layout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stageFlags{ stageFlags_ } , layout{ layout_ } , set{ set_ } { } VULKAN_HPP_CONSTEXPR BindDescriptorBufferEmbeddedSamplersInfoEXT( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindDescriptorBufferEmbeddedSamplersInfoEXT( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BindDescriptorBufferEmbeddedSamplersInfoEXT( *reinterpret_cast( &rhs ) ) { } BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindDescriptorBufferEmbeddedSamplersInfoEXT & operator=( VkBindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
BindDescriptorBufferEmbeddedSamplersInfoEXT & setStageFlags( ShaderStageFlags stageFlags_ ) & VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT && setStageFlags( ShaderStageFlags stageFlags_ ) && VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT & setSet( uint32_t set_ ) & VULKAN_HPP_NOEXCEPT { set = set_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorBufferEmbeddedSamplersInfoEXT && setSet( uint32_t set_ ) && VULKAN_HPP_NOEXCEPT { set = set_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindDescriptorBufferEmbeddedSamplersInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stageFlags, layout, set ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindDescriptorBufferEmbeddedSamplersInfoEXT const & ) const = default; #else bool operator==( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( set == rhs.set ); # endif } bool operator!=( BindDescriptorBufferEmbeddedSamplersInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT; const void * pNext = {}; ShaderStageFlags stageFlags = {}; PipelineLayout layout = {}; uint32_t set = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; }; #endif template <> struct CppType { using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; }; // wrapper struct for struct VkBindDescriptorSetsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindDescriptorSetsInfo.html struct BindDescriptorSetsInfo { using NativeType = VkBindDescriptorSetsInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( ShaderStageFlags stageFlags_ = {}, PipelineLayout layout_ = {}, uint32_t firstSet_ = {}, uint32_t descriptorSetCount_ = {}, const DescriptorSet * pDescriptorSets_ = {}, uint32_t dynamicOffsetCount_ = {}, const uint32_t * pDynamicOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stageFlags{ stageFlags_ } , layout{ layout_ } , firstSet{ firstSet_ } , descriptorSetCount{ descriptorSetCount_ } , pDescriptorSets{ pDescriptorSets_ } , dynamicOffsetCount{ dynamicOffsetCount_ } , pDynamicOffsets{ pDynamicOffsets_ } { } VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : 
BindDescriptorSetsInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindDescriptorSetsInfo( ShaderStageFlags stageFlags_, PipelineLayout layout_, uint32_t firstSet_, ArrayProxyNoTemporaries const & descriptorSets_, ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , stageFlags( stageFlags_ ) , layout( layout_ ) , firstSet( firstSet_ ) , descriptorSetCount( static_cast( descriptorSets_.size() ) ) , pDescriptorSets( descriptorSets_.data() ) , dynamicOffsetCount( static_cast( dynamicOffsets_.size() ) ) , pDynamicOffsets( dynamicOffsets_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( ShaderStageFlags stageFlags_ ) & VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setStageFlags( ShaderStageFlags stageFlags_ ) && VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) & VULKAN_HPP_NOEXCEPT { firstSet = firstSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setFirstSet( uint32_t firstSet_ ) && VULKAN_HPP_NOEXCEPT { firstSet = firstSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setDescriptorSetCount( uint32_t descriptorSetCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const DescriptorSet * pDescriptorSets_ ) & VULKAN_HPP_NOEXCEPT { pDescriptorSets = pDescriptorSets_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setPDescriptorSets( const DescriptorSet * pDescriptorSets_ ) && VULKAN_HPP_NOEXCEPT { pDescriptorSets = pDescriptorSets_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindDescriptorSetsInfo & setDescriptorSets( ArrayProxyNoTemporaries const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast( descriptorSets_.size() ); pDescriptorSets = descriptorSets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) & VULKAN_HPP_NOEXCEPT { dynamicOffsetCount = dynamicOffsetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo && setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) && VULKAN_HPP_NOEXCEPT { dynamicOffsetCount = dynamicOffsetCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) & VULKAN_HPP_NOEXCEPT { pDynamicOffsets = pDynamicOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BindDescriptorSetsInfo && setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) && VULKAN_HPP_NOEXCEPT { pDynamicOffsets = pDynamicOffsets_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindDescriptorSetsInfo & setDynamicOffsets( ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT { dynamicOffsetCount = static_cast( dynamicOffsets_.size() ); pDynamicOffsets = dynamicOffsets_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindDescriptorSetsInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindDescriptorSetsInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stageFlags, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindDescriptorSetsInfo const & ) const = default; #else bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageFlags == rhs.stageFlags ) && ( layout == rhs.layout ) && ( firstSet == rhs.firstSet ) && ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorSets == rhs.pDescriptorSets ) && ( dynamicOffsetCount == rhs.dynamicOffsetCount ) && ( pDynamicOffsets == rhs.pDynamicOffsets ); # endif } bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eBindDescriptorSetsInfo; const void * pNext = {}; ShaderStageFlags stageFlags = {}; PipelineLayout layout = {}; uint32_t firstSet = {}; uint32_t descriptorSetCount = {}; const DescriptorSet * pDescriptorSets = {}; uint32_t dynamicOffsetCount = {}; const uint32_t * pDynamicOffsets = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindDescriptorSetsInfo; }; #endif template <> struct CppType { using Type = BindDescriptorSetsInfo; }; using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; // wrapper struct for struct VkDeviceAddressRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressRangeEXT.html struct DeviceAddressRangeEXT { using NativeType = VkDeviceAddressRangeEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceAddressRangeEXT( DeviceAddress address_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : address{ address_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR DeviceAddressRangeEXT( DeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceAddressRangeEXT( VkDeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceAddressRangeEXT( *reinterpret_cast( &rhs ) ) { } DeviceAddressRangeEXT & operator=( DeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceAddressRangeEXT & operator=( VkDeviceAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT & setAddress( DeviceAddress address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT && setAddress( DeviceAddress address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT & setSize( 
DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressRangeEXT && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceAddressRangeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceAddressRangeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceAddressRangeEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceAddressRangeEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( address, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceAddressRangeEXT const & ) const = default; #else bool operator==( DeviceAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( address == rhs.address ) && ( size == rhs.size ); # endif } bool operator!=( DeviceAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress address = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceAddressRangeEXT; }; #endif // wrapper struct for struct VkBindHeapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindHeapInfoEXT.html struct BindHeapInfoEXT { using NativeType = VkBindHeapInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindHeapInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindHeapInfoEXT( DeviceAddressRangeEXT heapRange_ = {}, DeviceSize reservedRangeOffset_ = {}, DeviceSize 
reservedRangeSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , heapRange{ heapRange_ } , reservedRangeOffset{ reservedRangeOffset_ } , reservedRangeSize{ reservedRangeSize_ } { } VULKAN_HPP_CONSTEXPR BindHeapInfoEXT( BindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindHeapInfoEXT( VkBindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BindHeapInfoEXT( *reinterpret_cast( &rhs ) ) {} BindHeapInfoEXT & operator=( BindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindHeapInfoEXT & operator=( VkBindHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setHeapRange( DeviceAddressRangeEXT const & heapRange_ ) & VULKAN_HPP_NOEXCEPT { heapRange = heapRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setHeapRange( DeviceAddressRangeEXT const & heapRange_ ) && VULKAN_HPP_NOEXCEPT { heapRange = heapRange_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setReservedRangeOffset( DeviceSize reservedRangeOffset_ ) & VULKAN_HPP_NOEXCEPT { reservedRangeOffset = reservedRangeOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && setReservedRangeOffset( DeviceSize reservedRangeOffset_ ) && VULKAN_HPP_NOEXCEPT { reservedRangeOffset = reservedRangeOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT & setReservedRangeSize( DeviceSize reservedRangeSize_ ) & VULKAN_HPP_NOEXCEPT { reservedRangeSize = reservedRangeSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindHeapInfoEXT && 
setReservedRangeSize( DeviceSize reservedRangeSize_ ) && VULKAN_HPP_NOEXCEPT { reservedRangeSize = reservedRangeSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindHeapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindHeapInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindHeapInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindHeapInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, heapRange, reservedRangeOffset, reservedRangeSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindHeapInfoEXT const & ) const = default; #else bool operator==( BindHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapRange == rhs.heapRange ) && ( reservedRangeOffset == rhs.reservedRangeOffset ) && ( reservedRangeSize == rhs.reservedRangeSize ); # endif } bool operator!=( BindHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindHeapInfoEXT; const void * pNext = {}; DeviceAddressRangeEXT heapRange = {}; DeviceSize reservedRangeOffset = {}; DeviceSize reservedRangeSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindHeapInfoEXT; }; #endif template <> struct CppType { using Type = BindHeapInfoEXT; }; // wrapper struct for struct VkOffset2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset2D.html struct Offset2D { using NativeType = VkOffset2D; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ 
= {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } { } VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset2D( *reinterpret_cast( &rhs ) ) {} Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Offset2D & setX( int32_t x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset2D && setX( int32_t x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Offset2D & setY( int32_t y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset2D && setY( int32_t y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOffset2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOffset2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOffset2D const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOffset2D *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( x, y ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Offset2D const & ) const = default; #else bool operator==( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ); # endif } bool operator!=( Offset2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: int32_t x = {}; int32_t y = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Offset2D; }; #endif // wrapper struct for struct VkRect2D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkRect2D.html struct Rect2D { using NativeType = VkRect2D; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Rect2D( Offset2D offset_ = {}, Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT : offset{ offset_ } , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT : Rect2D( *reinterpret_cast( &rhs ) ) {} Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Rect2D & setOffset( Offset2D const & offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 Rect2D && setOffset( Offset2D const & offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Rect2D & setExtent( Extent2D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 Rect2D && setExtent( Extent2D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkRect2D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkRect2D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkRect2D const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkRect2D *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( offset, extent 
); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Rect2D const & ) const = default; #else bool operator==( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( offset == rhs.offset ) && ( extent == rhs.extent ); # endif } bool operator!=( Rect2D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Offset2D offset = {}; Extent2D extent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Rect2D; }; #endif // wrapper struct for struct VkBindImageMemoryDeviceGroupInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryDeviceGroupInfo.html struct BindImageMemoryDeviceGroupInfo { using NativeType = VkBindImageMemoryDeviceGroupInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const Rect2D * pSplitInstanceBindRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceIndexCount{ deviceIndexCount_ } , pDeviceIndices{ pDeviceIndices_ } , splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ } , pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ } { } VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryDeviceGroupInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindImageMemoryDeviceGroupInfo( ArrayProxyNoTemporaries const & 
deviceIndices_, ArrayProxyNoTemporaries const & splitInstanceBindRegions_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , deviceIndexCount( static_cast( deviceIndices_.size() ) ) , pDeviceIndices( deviceIndices_.data() ) , splitInstanceBindRegionCount( static_cast( splitInstanceBindRegions_.size() ) ) , pSplitInstanceBindRegions( splitInstanceBindRegions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) & VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo && setDeviceIndexCount( uint32_t deviceIndexCount_ ) && VULKAN_HPP_NOEXCEPT { deviceIndexCount = deviceIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t * pDeviceIndices_ ) & VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo && setPDeviceIndices( const uint32_t * pDeviceIndices_ ) && VULKAN_HPP_NOEXCEPT { pDeviceIndices = pDeviceIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindImageMemoryDeviceGroupInfo & 
setDeviceIndices( ArrayProxyNoTemporaries const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT { deviceIndexCount = static_cast( deviceIndices_.size() ); pDeviceIndices = deviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) & VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo && setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) && VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = splitInstanceBindRegionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const Rect2D * pSplitInstanceBindRegions_ ) & VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo && setPSplitInstanceBindRegions( const Rect2D * pSplitInstanceBindRegions_ ) && VULKAN_HPP_NOEXCEPT { pSplitInstanceBindRegions = pSplitInstanceBindRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( ArrayProxyNoTemporaries const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT { splitInstanceBindRegionCount = static_cast( splitInstanceBindRegions_.size() ); pSplitInstanceBindRegions = splitInstanceBindRegions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemoryDeviceGroupInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemoryDeviceGroupInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkBindImageMemoryDeviceGroupInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceIndexCount, pDeviceIndices, splitInstanceBindRegionCount, pSplitInstanceBindRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindImageMemoryDeviceGroupInfo const & ) const = default; #else bool operator==( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndexCount == rhs.deviceIndexCount ) && ( pDeviceIndices == rhs.pDeviceIndices ) && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); # endif } bool operator!=( BindImageMemoryDeviceGroupInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; const void * pNext = {}; uint32_t deviceIndexCount = {}; const uint32_t * pDeviceIndices = {}; uint32_t splitInstanceBindRegionCount = {}; const Rect2D * pSplitInstanceBindRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindImageMemoryDeviceGroupInfo; }; #endif template <> struct CppType { using Type = BindImageMemoryDeviceGroupInfo; }; using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; // wrapper struct for struct VkBindImageMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemoryInfo.html struct BindImageMemoryInfo { using NativeType = VkBindImageMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( Image image_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } { } VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemoryInfo( *reinterpret_cast( &rhs ) ) { } BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo && setMemoryOffset( DeviceSize memoryOffset_ ) && 
VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemoryInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindImageMemoryInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, memory, memoryOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindImageMemoryInfo const & ) const = default; #else bool operator==( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); # endif } bool operator!=( BindImageMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindImageMemoryInfo; const void * pNext = {}; Image image = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindImageMemoryInfo; }; #endif template <> struct CppType { using Type = BindImageMemoryInfo; }; using BindImageMemoryInfoKHR = BindImageMemoryInfo; // wrapper struct for struct VkBindImageMemorySwapchainInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImageMemorySwapchainInfoKHR.html struct BindImageMemorySwapchainInfoKHR { using NativeType = VkBindImageMemorySwapchainInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eBindImageMemorySwapchainInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , swapchain{ swapchain_ } , imageIndex{ imageIndex_ } { } VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : BindImageMemorySwapchainInfoKHR( *reinterpret_cast( &rhs ) ) { } BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) & VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR && setSwapchain( SwapchainKHR swapchain_ ) && VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) & VULKAN_HPP_NOEXCEPT { imageIndex = imageIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR && setImageIndex( uint32_t imageIndex_ ) && 
VULKAN_HPP_NOEXCEPT { imageIndex = imageIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImageMemorySwapchainInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImageMemorySwapchainInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindImageMemorySwapchainInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, swapchain, imageIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindImageMemorySwapchainInfoKHR const & ) const = default; #else bool operator==( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ) && ( imageIndex == rhs.imageIndex ); # endif } bool operator!=( BindImageMemorySwapchainInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; const void * pNext = {}; SwapchainKHR swapchain = {}; uint32_t imageIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindImageMemorySwapchainInfoKHR; }; #endif template <> struct CppType { using Type = BindImageMemorySwapchainInfoKHR; }; // wrapper struct for struct VkBindImagePlaneMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindImagePlaneMemoryInfo.html struct BindImagePlaneMemoryInfo { using NativeType = VkBindImagePlaneMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eBindImagePlaneMemoryInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , planeAspect{ planeAspect_ } { } VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindImagePlaneMemoryInfo( *reinterpret_cast( &rhs ) ) { } BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) & VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo && setPlaneAspect( ImageAspectFlagBits planeAspect_ ) && VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindImagePlaneMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindImagePlaneMemoryInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkBindImagePlaneMemoryInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, planeAspect ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindImagePlaneMemoryInfo const & ) const = default; #else bool operator==( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); # endif } bool operator!=( BindImagePlaneMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindImagePlaneMemoryInfo; const void * pNext = {}; ImageAspectFlagBits planeAspect = ImageAspectFlagBits::eColor; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindImagePlaneMemoryInfo; }; #endif template <> struct CppType { using Type = BindImagePlaneMemoryInfo; }; using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; // wrapper struct for struct VkBindIndexBufferIndirectCommandEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandEXT.html struct BindIndexBufferIndirectCommandEXT { using NativeType = VkBindIndexBufferIndirectCommandEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, IndexType indexType_ = IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT : bufferAddress{ bufferAddress_ } , size{ size_ } , indexType{ indexType_ } { } VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT : BindIndexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT && setBufferAddress( DeviceAddress bufferAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } 
#if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferAddress, size, indexType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default; #else bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); # endif } bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress bufferAddress = {}; uint32_t size = {}; IndexType indexType = IndexType::eUint16; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindIndexBufferIndirectCommandEXT; }; #endif // wrapper struct for struct VkBindIndexBufferIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindIndexBufferIndirectCommandNV.html struct BindIndexBufferIndirectCommandNV { using NativeType = VkBindIndexBufferIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, IndexType indexType_ = IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT : bufferAddress{ bufferAddress_ } , size{ size_ } , indexType{ indexType_ } { } VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : BindIndexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setBufferAddress( DeviceAddress bufferAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV && setBufferAddress( DeviceAddress bufferAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & setIndexType( IndexType indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV && setIndexType( IndexType indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindIndexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindIndexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferAddress, size, indexType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
BindIndexBufferIndirectCommandNV const & ) const = default; #else bool operator==( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); # endif } bool operator!=( BindIndexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress bufferAddress = {}; uint32_t size = {}; IndexType indexType = IndexType::eUint16; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindIndexBufferIndirectCommandNV; }; #endif // wrapper struct for struct VkBindMemoryStatus, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindMemoryStatus.html struct BindMemoryStatus { using NativeType = VkBindMemoryStatus; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatus; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindMemoryStatus( Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pResult{ pResult_ } { } VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatus( *reinterpret_cast( &rhs ) ) {} BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( Result * pResult_ ) & VULKAN_HPP_NOEXCEPT { pResult = pResult_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus && setPResult( Result * pResult_ ) && VULKAN_HPP_NOEXCEPT { pResult = pResult_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindMemoryStatus const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindMemoryStatus *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pResult ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindMemoryStatus const & ) const = default; #else bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pResult == rhs.pResult ); # endif } bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindMemoryStatus; const void * pNext = {}; Result * pResult = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindMemoryStatus; }; #endif template <> struct CppType { using Type = BindMemoryStatus; }; using BindMemoryStatusKHR = BindMemoryStatus; // wrapper struct for struct VkBindPipelineIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindPipelineIndirectCommandNV.html struct 
BindPipelineIndirectCommandNV { using NativeType = VkBindPipelineIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT : pipelineAddress{ pipelineAddress_ } {} VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindPipelineIndirectCommandNV( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : BindPipelineIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } BindPipelineIndirectCommandNV & operator=( BindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindPipelineIndirectCommandNV & operator=( VkBindPipelineIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV & setPipelineAddress( DeviceAddress pipelineAddress_ ) & VULKAN_HPP_NOEXCEPT { pipelineAddress = pipelineAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindPipelineIndirectCommandNV && setPipelineAddress( DeviceAddress pipelineAddress_ ) && VULKAN_HPP_NOEXCEPT { pipelineAddress = pipelineAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindPipelineIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindPipelineIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindPipelineIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindPipelineIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( pipelineAddress ); } #endif 
#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindPipelineIndirectCommandNV const & ) const = default; #else bool operator==( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( pipelineAddress == rhs.pipelineAddress ); # endif } bool operator!=( BindPipelineIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress pipelineAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindPipelineIndirectCommandNV; }; #endif // wrapper struct for struct VkBindShaderGroupIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindShaderGroupIndirectCommandNV.html struct BindShaderGroupIndirectCommandNV { using NativeType = VkBindShaderGroupIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex{ groupIndex_ } {} VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : BindShaderGroupIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) & VULKAN_HPP_NOEXCEPT { groupIndex = 
groupIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV && setGroupIndex( uint32_t groupIndex_ ) && VULKAN_HPP_NOEXCEPT { groupIndex = groupIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindShaderGroupIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindShaderGroupIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindShaderGroupIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( groupIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindShaderGroupIndirectCommandNV const & ) const = default; #else bool operator==( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( groupIndex == rhs.groupIndex ); # endif } bool operator!=( BindShaderGroupIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t groupIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindShaderGroupIndirectCommandNV; }; #endif // wrapper struct for struct VkSparseMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseMemoryBind.html struct SparseMemoryBind { using NativeType = VkSparseMemoryBind; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseMemoryBind( DeviceSize resourceOffset_ = {}, DeviceSize size_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : resourceOffset{ resourceOffset_ } , 
size{ size_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseMemoryBind( *reinterpret_cast( &rhs ) ) {} SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setResourceOffset( DeviceSize resourceOffset_ ) & VULKAN_HPP_NOEXCEPT { resourceOffset = resourceOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind && setResourceOffset( DeviceSize resourceOffset_ ) && VULKAN_HPP_NOEXCEPT { resourceOffset = resourceOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & setFlags( SparseMemoryBindFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = 
flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind && setFlags( SparseMemoryBindFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseMemoryBind const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseMemoryBind const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSparseMemoryBind *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( resourceOffset, size, memory, memoryOffset, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SparseMemoryBind const & ) const = default; #else bool operator==( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( resourceOffset == rhs.resourceOffset ) && ( size == rhs.size ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); # endif } bool operator!=( SparseMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize resourceOffset = {}; DeviceSize size = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; SparseMemoryBindFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SparseMemoryBind; }; #endif // wrapper struct for struct VkSparseBufferMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseBufferMemoryBindInfo.html struct SparseBufferMemoryBindInfo { using NativeType = VkSparseBufferMemoryBindInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( Buffer buffer_ = {}, uint32_t 
bindCount_ = {}, const SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : buffer{ buffer_ } , bindCount{ bindCount_ } , pBinds{ pBinds_ } { } VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SparseBufferMemoryBindInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseBufferMemoryBindInfo( Buffer buffer_, ArrayProxyNoTemporaries const & binds_ ) : buffer( buffer_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) & VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo && setBindCount( uint32_t bindCount_ ) && VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & setPBinds( const SparseMemoryBind * pBinds_ ) & VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo && setPBinds( const SparseMemoryBind * pBinds_ ) && VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; 
return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseBufferMemoryBindInfo & setBinds( ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT { bindCount = static_cast( binds_.size() ); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseBufferMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseBufferMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSparseBufferMemoryBindInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( buffer, bindCount, pBinds ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SparseBufferMemoryBindInfo const & ) const = default; #else bool operator==( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( buffer == rhs.buffer ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); # endif } bool operator!=( SparseBufferMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Buffer buffer = {}; uint32_t bindCount = {}; const SparseMemoryBind * pBinds = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SparseBufferMemoryBindInfo; }; #endif // wrapper struct for struct VkSparseImageOpaqueMemoryBindInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageOpaqueMemoryBindInfo.html struct SparseImageOpaqueMemoryBindInfo { using NativeType = VkSparseImageOpaqueMemoryBindInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( Image image_ = {}, uint32_t bindCount_ = {}, const SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image{ image_ } , bindCount{ bindCount_ } , pBinds{ pBinds_ } { } VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SparseImageOpaqueMemoryBindInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseImageOpaqueMemoryBindInfo( Image image_, ArrayProxyNoTemporaries const & binds_ ) : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) & VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo && setBindCount( uint32_t bindCount_ ) && VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & setPBinds( const SparseMemoryBind * pBinds_ ) & VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo && setPBinds( const SparseMemoryBind * pBinds_ ) && VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseImageOpaqueMemoryBindInfo & setBinds( ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT { bindCount = static_cast( binds_.size() ); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageOpaqueMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageOpaqueMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSparseImageOpaqueMemoryBindInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( image, bindCount, pBinds ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SparseImageOpaqueMemoryBindInfo const & ) const = default; #else bool operator==( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); # endif } bool operator!=( SparseImageOpaqueMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Image image = {}; uint32_t bindCount = {}; const SparseMemoryBind * pBinds = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SparseImageOpaqueMemoryBindInfo; }; #endif // wrapper struct for struct VkImageSubresource, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource.html struct ImageSubresource { using 
NativeType = VkImageSubresource; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource( ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask{ aspectMask_ } , mipLevel{ mipLevel_ } , arrayLayer{ arrayLayer_ } { } VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource( *reinterpret_cast( &rhs ) ) {} ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setMipLevel( uint32_t mipLevel_ ) & VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource && setMipLevel( uint32_t mipLevel_ ) && VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) & VULKAN_HPP_NOEXCEPT { arrayLayer = arrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource && setArrayLayer( uint32_t arrayLayer_ ) && VULKAN_HPP_NOEXCEPT { arrayLayer = arrayLayer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresource const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkImageSubresource &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresource const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSubresource *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( aspectMask, mipLevel, arrayLayer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageSubresource const & ) const = default; #else bool operator==( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( arrayLayer == rhs.arrayLayer ); # endif } bool operator!=( ImageSubresource const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageAspectFlags aspectMask = {}; uint32_t mipLevel = {}; uint32_t arrayLayer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSubresource; }; #endif // wrapper struct for struct VkOffset3D, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOffset3D.html struct Offset3D { using NativeType = VkOffset3D; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } , z{ z_ } { } VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT : Offset3D( *reinterpret_cast( &rhs ) ) {} explicit Offset3D( Offset2D const & offset2D, int32_t z_ = {} ) : x( offset2D.x ), y( offset2D.y ), z( z_ ) {} Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Offset3D & operator=( VkOffset3D const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Offset3D & setX( int32_t x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset3D && setX( int32_t x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Offset3D & setY( int32_t y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset3D && setY( int32_t y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Offset3D & setZ( int32_t z_ ) & VULKAN_HPP_NOEXCEPT { z = z_; return *this; } VULKAN_HPP_CONSTEXPR_14 Offset3D && setZ( int32_t z_ ) && VULKAN_HPP_NOEXCEPT { z = z_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOffset3D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOffset3D &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOffset3D const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOffset3D *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( x, y, z ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Offset3D const & ) const = default; #else bool operator==( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); # endif } bool operator!=( Offset3D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: int32_t x = {}; int32_t y = {}; int32_t z = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Offset3D; }; #endif // wrapper struct for struct VkExtent3D, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtent3D.html struct Extent3D { using NativeType = VkExtent3D; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT : width{ width_ } , height{ height_ } , depth{ depth_ } { } VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT : Extent3D( *reinterpret_cast( &rhs ) ) {} explicit Extent3D( Extent2D const & extent2D, uint32_t depth_ = {} ) : width( extent2D.width ), height( extent2D.height ), depth( depth_ ) {} Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Extent3D & setWidth( uint32_t width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent3D && setWidth( uint32_t width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Extent3D & setHeight( uint32_t height_ ) & VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent3D && setHeight( uint32_t height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Extent3D & setDepth( uint32_t depth_ ) & VULKAN_HPP_NOEXCEPT { depth = depth_; return *this; } VULKAN_HPP_CONSTEXPR_14 Extent3D && setDepth( uint32_t depth_ ) && VULKAN_HPP_NOEXCEPT { depth = depth_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExtent3D const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExtent3D &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkExtent3D const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExtent3D *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( width, height, depth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Extent3D const & ) const = default; #else bool operator==( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( width == rhs.width ) && ( height == rhs.height ) && ( depth == rhs.depth ); # endif } bool operator!=( Extent3D const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t width = {}; uint32_t height = {}; uint32_t depth = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Extent3D; }; #endif // wrapper struct for struct VkSparseImageMemoryBind, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBind.html struct SparseImageMemoryBind { using NativeType = VkSparseImageMemoryBind; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( ImageSubresource subresource_ = {}, Offset3D offset_ = {}, Extent3D extent_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT : subresource{ subresource_ } , offset{ offset_ } , extent{ extent_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT : SparseImageMemoryBind( *reinterpret_cast( &rhs ) ) { } SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setSubresource( ImageSubresource const & subresource_ ) & VULKAN_HPP_NOEXCEPT { subresource = subresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setSubresource( ImageSubresource const & subresource_ ) && VULKAN_HPP_NOEXCEPT { subresource = subresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setOffset( Offset3D const & offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setOffset( Offset3D const & offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & setFlags( SparseMemoryBindFlags flags_ ) & 
VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind && setFlags( SparseMemoryBindFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageMemoryBind const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageMemoryBind const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSparseImageMemoryBind *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( subresource, offset, extent, memory, memoryOffset, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SparseImageMemoryBind const & ) const = default; #else bool operator==( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( subresource == rhs.subresource ) && ( offset == rhs.offset ) && ( extent == rhs.extent ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( flags == rhs.flags ); # endif } bool operator!=( SparseImageMemoryBind const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageSubresource subresource = {}; Offset3D offset = {}; Extent3D extent = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; SparseMemoryBindFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SparseImageMemoryBind; }; #endif // wrapper struct for struct VkSparseImageMemoryBindInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSparseImageMemoryBindInfo.html struct SparseImageMemoryBindInfo { using NativeType = VkSparseImageMemoryBindInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) 
&& !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( Image image_ = {}, uint32_t bindCount_ = {}, const SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT : image{ image_ } , bindCount{ bindCount_ } , pBinds{ pBinds_ } { } VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SparseImageMemoryBindInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseImageMemoryBindInfo( Image image_, ArrayProxyNoTemporaries const & binds_ ) : image( image_ ), bindCount( static_cast( binds_.size() ) ), pBinds( binds_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) & VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo && setBindCount( uint32_t bindCount_ ) && VULKAN_HPP_NOEXCEPT { bindCount = bindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & setPBinds( const SparseImageMemoryBind * pBinds_ ) & VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return *this; } VULKAN_HPP_CONSTEXPR_14 
SparseImageMemoryBindInfo && setPBinds( const SparseImageMemoryBind * pBinds_ ) && VULKAN_HPP_NOEXCEPT { pBinds = pBinds_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SparseImageMemoryBindInfo & setBinds( ArrayProxyNoTemporaries const & binds_ ) VULKAN_HPP_NOEXCEPT { bindCount = static_cast( binds_.size() ); pBinds = binds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSparseImageMemoryBindInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSparseImageMemoryBindInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSparseImageMemoryBindInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( image, bindCount, pBinds ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SparseImageMemoryBindInfo const & ) const = default; #else bool operator==( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( image == rhs.image ) && ( bindCount == rhs.bindCount ) && ( pBinds == rhs.pBinds ); # endif } bool operator!=( SparseImageMemoryBindInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Image image = {}; uint32_t bindCount = {}; const SparseImageMemoryBind * pBinds = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SparseImageMemoryBindInfo; }; #endif // wrapper struct for struct VkBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindSparseInfo.html struct BindSparseInfo { using NativeType = VkBindSparseInfo; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, const Semaphore * pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const SparseBufferMemoryBindInfo * pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const SparseImageMemoryBindInfo * pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , waitSemaphoreCount{ waitSemaphoreCount_ } , pWaitSemaphores{ pWaitSemaphores_ } , bufferBindCount{ bufferBindCount_ } , pBufferBinds{ pBufferBinds_ } , imageOpaqueBindCount{ imageOpaqueBindCount_ } , pImageOpaqueBinds{ pImageOpaqueBinds_ } , imageBindCount{ imageBindCount_ } , pImageBinds{ pImageBinds_ } , signalSemaphoreCount{ signalSemaphoreCount_ } , pSignalSemaphores{ pSignalSemaphores_ } { } VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BindSparseInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo( ArrayProxyNoTemporaries const & waitSemaphores_, ArrayProxyNoTemporaries const & bufferBinds_ = {}, ArrayProxyNoTemporaries const & imageOpaqueBinds_ = {}, ArrayProxyNoTemporaries const & imageBinds_ = {}, ArrayProxyNoTemporaries const & signalSemaphores_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , waitSemaphoreCount( static_cast( waitSemaphores_.size() ) ) , pWaitSemaphores( waitSemaphores_.data() ) , bufferBindCount( static_cast( bufferBinds_.size() ) ) , pBufferBinds( bufferBinds_.data() ) , imageOpaqueBindCount( static_cast( imageOpaqueBinds_.size() ) ) , pImageOpaqueBinds( 
imageOpaqueBinds_.data() ) , imageBindCount( static_cast( imageBinds_.size() ) ) , pImageBinds( imageBinds_.data() ) , signalSemaphoreCount( static_cast( signalSemaphores_.size() ) ) , pSignalSemaphores( signalSemaphores_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) & VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) && VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPWaitSemaphores( const Semaphore * pWaitSemaphores_ ) & VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPWaitSemaphores( const Semaphore * pWaitSemaphores_ ) && VULKAN_HPP_NOEXCEPT { pWaitSemaphores = pWaitSemaphores_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo & setWaitSemaphores( ArrayProxyNoTemporaries const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast( waitSemaphores_.size() ); pWaitSemaphores = waitSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & 
setBufferBindCount( uint32_t bufferBindCount_ ) & VULKAN_HPP_NOEXCEPT { bufferBindCount = bufferBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setBufferBindCount( uint32_t bufferBindCount_ ) && VULKAN_HPP_NOEXCEPT { bufferBindCount = bufferBindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPBufferBinds( const SparseBufferMemoryBindInfo * pBufferBinds_ ) & VULKAN_HPP_NOEXCEPT { pBufferBinds = pBufferBinds_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPBufferBinds( const SparseBufferMemoryBindInfo * pBufferBinds_ ) && VULKAN_HPP_NOEXCEPT { pBufferBinds = pBufferBinds_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo & setBufferBinds( ArrayProxyNoTemporaries const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT { bufferBindCount = static_cast( bufferBinds_.size() ); pBufferBinds = bufferBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) & VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = imageOpaqueBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) && VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = imageOpaqueBindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) & VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPImageOpaqueBinds( const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds_ ) && VULKAN_HPP_NOEXCEPT { pImageOpaqueBinds = pImageOpaqueBinds_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo & setImageOpaqueBinds( ArrayProxyNoTemporaries const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT { imageOpaqueBindCount = static_cast( 
imageOpaqueBinds_.size() ); pImageOpaqueBinds = imageOpaqueBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) & VULKAN_HPP_NOEXCEPT { imageBindCount = imageBindCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setImageBindCount( uint32_t imageBindCount_ ) && VULKAN_HPP_NOEXCEPT { imageBindCount = imageBindCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPImageBinds( const SparseImageMemoryBindInfo * pImageBinds_ ) & VULKAN_HPP_NOEXCEPT { pImageBinds = pImageBinds_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPImageBinds( const SparseImageMemoryBindInfo * pImageBinds_ ) && VULKAN_HPP_NOEXCEPT { pImageBinds = pImageBinds_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo & setImageBinds( ArrayProxyNoTemporaries const & imageBinds_ ) VULKAN_HPP_NOEXCEPT { imageBindCount = static_cast( imageBinds_.size() ); pImageBinds = imageBinds_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) & VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) && VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & setPSignalSemaphores( const Semaphore * pSignalSemaphores_ ) & VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindSparseInfo && setPSignalSemaphores( const Semaphore * pSignalSemaphores_ ) && VULKAN_HPP_NOEXCEPT { pSignalSemaphores = pSignalSemaphores_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BindSparseInfo & setSignalSemaphores( 
ArrayProxyNoTemporaries const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = static_cast( signalSemaphores_.size() ); pSignalSemaphores = signalSemaphores_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindSparseInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, waitSemaphoreCount, pWaitSemaphores, bufferBindCount, pBufferBinds, imageOpaqueBindCount, pImageOpaqueBinds, imageBindCount, pImageBinds, signalSemaphoreCount, pSignalSemaphores ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindSparseInfo const & ) const = default; #else bool operator==( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && ( pWaitSemaphores == rhs.pWaitSemaphores ) && ( bufferBindCount == rhs.bufferBindCount ) && ( pBufferBinds == rhs.pBufferBinds ) && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount ) && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds ) && ( imageBindCount == rhs.imageBindCount ) && ( pImageBinds == rhs.pImageBinds ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphores == rhs.pSignalSemaphores ); # endif } bool operator!=( BindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindSparseInfo; const void * pNext = {}; uint32_t 
waitSemaphoreCount = {}; const Semaphore * pWaitSemaphores = {}; uint32_t bufferBindCount = {}; const SparseBufferMemoryBindInfo * pBufferBinds = {}; uint32_t imageOpaqueBindCount = {}; const SparseImageOpaqueMemoryBindInfo * pImageOpaqueBinds = {}; uint32_t imageBindCount = {}; const SparseImageMemoryBindInfo * pImageBinds = {}; uint32_t signalSemaphoreCount = {}; const Semaphore * pSignalSemaphores = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindSparseInfo; }; #endif template <> struct CppType { using Type = BindSparseInfo; }; // wrapper struct for struct VkBindTensorMemoryInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindTensorMemoryInfoARM.html struct BindTensorMemoryInfoARM { using NativeType = VkBindTensorMemoryInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindTensorMemoryInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM( TensorARM tensor_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tensor{ tensor_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } { } VULKAN_HPP_CONSTEXPR BindTensorMemoryInfoARM( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindTensorMemoryInfoARM( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : BindTensorMemoryInfoARM( *reinterpret_cast( &rhs ) ) { } BindTensorMemoryInfoARM & operator=( BindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindTensorMemoryInfoARM & operator=( VkBindTensorMemoryInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
BindTensorMemoryInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setTensor( TensorARM tensor_ ) & VULKAN_HPP_NOEXCEPT { tensor = tensor_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM && setTensor( TensorARM tensor_ ) && VULKAN_HPP_NOEXCEPT { tensor = tensor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindTensorMemoryInfoARM && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindTensorMemoryInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindTensorMemoryInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindTensorMemoryInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindTensorMemoryInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tensor, memory, memoryOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindTensorMemoryInfoARM const & ) const = default; #else bool operator==( BindTensorMemoryInfoARM const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ); # endif } bool operator!=( BindTensorMemoryInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindTensorMemoryInfoARM; const void * pNext = {}; TensorARM tensor = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindTensorMemoryInfoARM; }; #endif template <> struct CppType { using Type = BindTensorMemoryInfoARM; }; // wrapper struct for struct VkBindVertexBufferIndirectCommandEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandEXT.html struct BindVertexBufferIndirectCommandEXT { using NativeType = VkBindVertexBufferIndirectCommandEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : bufferAddress{ bufferAddress_ } , size{ size_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BindVertexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } 
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT && setBufferAddress( DeviceAddress bufferAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT && setStride( uint32_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferAddress, size, stride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default; #else bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); # endif } bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress bufferAddress = {}; uint32_t size = {}; uint32_t stride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindVertexBufferIndirectCommandEXT; }; #endif // wrapper struct for struct VkBindVertexBufferIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVertexBufferIndirectCommandNV.html struct BindVertexBufferIndirectCommandNV { using NativeType = VkBindVertexBufferIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : bufferAddress{ bufferAddress_ } , size{ size_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : BindVertexBufferIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setBufferAddress( DeviceAddress bufferAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BindVertexBufferIndirectCommandNV && setBufferAddress( DeviceAddress bufferAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV && setStride( uint32_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindVertexBufferIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindVertexBufferIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferAddress, size, stride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindVertexBufferIndirectCommandNV const & ) const = default; #else bool operator==( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); # endif } bool operator!=( BindVertexBufferIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress bufferAddress = {}; 
uint32_t size = {}; uint32_t stride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindVertexBufferIndirectCommandNV; }; #endif // wrapper struct for struct VkBindVideoSessionMemoryInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBindVideoSessionMemoryInfoKHR.html struct BindVideoSessionMemoryInfoKHR { using NativeType = VkBindVideoSessionMemoryInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindVideoSessionMemoryInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( uint32_t memoryBindIndex_ = {}, DeviceMemory memory_ = {}, DeviceSize memoryOffset_ = {}, DeviceSize memorySize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryBindIndex{ memoryBindIndex_ } , memory{ memory_ } , memoryOffset{ memoryOffset_ } , memorySize{ memorySize_ } { } VULKAN_HPP_CONSTEXPR BindVideoSessionMemoryInfoKHR( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; BindVideoSessionMemoryInfoKHR( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : BindVideoSessionMemoryInfoKHR( *reinterpret_cast( &rhs ) ) { } BindVideoSessionMemoryInfoKHR & operator=( BindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BindVideoSessionMemoryInfoKHR & operator=( VkBindVideoSessionMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryBindIndex( uint32_t memoryBindIndex_ ) & VULKAN_HPP_NOEXCEPT { memoryBindIndex = memoryBindIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR && setMemoryBindIndex( uint32_t memoryBindIndex_ ) && VULKAN_HPP_NOEXCEPT { memoryBindIndex = memoryBindIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemoryOffset( DeviceSize memoryOffset_ ) & VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR && setMemoryOffset( DeviceSize memoryOffset_ ) && VULKAN_HPP_NOEXCEPT { memoryOffset = memoryOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR & setMemorySize( DeviceSize memorySize_ ) & VULKAN_HPP_NOEXCEPT { memorySize = memorySize_; return *this; } VULKAN_HPP_CONSTEXPR_14 BindVideoSessionMemoryInfoKHR && setMemorySize( DeviceSize memorySize_ ) && VULKAN_HPP_NOEXCEPT { memorySize = memorySize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBindVideoSessionMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVideoSessionMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBindVideoSessionMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBindVideoSessionMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
memoryBindIndex, memory, memoryOffset, memorySize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BindVideoSessionMemoryInfoKHR const & ) const = default; #else bool operator==( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryBindIndex == rhs.memoryBindIndex ) && ( memory == rhs.memory ) && ( memoryOffset == rhs.memoryOffset ) && ( memorySize == rhs.memorySize ); # endif } bool operator!=( BindVideoSessionMemoryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBindVideoSessionMemoryInfoKHR; const void * pNext = {}; uint32_t memoryBindIndex = {}; DeviceMemory memory = {}; DeviceSize memoryOffset = {}; DeviceSize memorySize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BindVideoSessionMemoryInfoKHR; }; #endif template <> struct CppType { using Type = BindVideoSessionMemoryInfoKHR; }; // wrapper struct for struct VkBlitImageCubicWeightsInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageCubicWeightsInfoQCOM.html struct BlitImageCubicWeightsInfoQCOM { using NativeType = VkBlitImageCubicWeightsInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageCubicWeightsInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( CubicFilterWeightsQCOM cubicWeights_ = CubicFilterWeightsQCOM::eCatmullRom, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cubicWeights{ cubicWeights_ } { } VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
BlitImageCubicWeightsInfoQCOM( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageCubicWeightsInfoQCOM( *reinterpret_cast( &rhs ) ) { } BlitImageCubicWeightsInfoQCOM & operator=( BlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BlitImageCubicWeightsInfoQCOM & operator=( VkBlitImageCubicWeightsInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM & setCubicWeights( CubicFilterWeightsQCOM cubicWeights_ ) & VULKAN_HPP_NOEXCEPT { cubicWeights = cubicWeights_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageCubicWeightsInfoQCOM && setCubicWeights( CubicFilterWeightsQCOM cubicWeights_ ) && VULKAN_HPP_NOEXCEPT { cubicWeights = cubicWeights_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBlitImageCubicWeightsInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBlitImageCubicWeightsInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBlitImageCubicWeightsInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBlitImageCubicWeightsInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cubicWeights ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BlitImageCubicWeightsInfoQCOM const & ) const = default; #else bool 
operator==( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicWeights == rhs.cubicWeights ); # endif } bool operator!=( BlitImageCubicWeightsInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBlitImageCubicWeightsInfoQCOM; const void * pNext = {}; CubicFilterWeightsQCOM cubicWeights = CubicFilterWeightsQCOM::eCatmullRom; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BlitImageCubicWeightsInfoQCOM; }; #endif template <> struct CppType { using Type = BlitImageCubicWeightsInfoQCOM; }; // wrapper struct for struct VkImageSubresourceLayers, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceLayers.html struct ImageSubresourceLayers { using NativeType = VkImageSubresourceLayers; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask{ aspectMask_ } , mipLevel{ mipLevel_ } , baseArrayLayer{ baseArrayLayer_ } , layerCount{ layerCount_ } { } VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresourceLayers( *reinterpret_cast( &rhs ) ) { } ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
// Generated Vulkan-Hpp code — ImageSubresourceLayers wrapper (struct opens before this
// chunk): fluent lvalue/rvalue setters, Vk*-handle conversion operators, optional
// reflect() tuple, ==/!= fallback, data members, CppType specialization, then the
// opening of the ImageBlit2 wrapper (VkImageBlit2).
// NOTE(review): template-argument lists were stripped from this chunk during extraction
// (e.g. "reinterpret_cast( this )" and "std::tuple reflect()" have empty angle-bracket
// contents), so this text cannot compile as-is. This header is generated from the
// Vulkan XML registry — regenerate it rather than hand-patching; TODO confirm against
// upstream vulkan_structs.hpp.
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) & VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers && setMipLevel( uint32_t mipLevel_ ) && VULKAN_HPP_NOEXCEPT { mipLevel = mipLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) & VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers && setBaseArrayLayer( uint32_t baseArrayLayer_ ) && VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) & VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers && setLayerCount( uint32_t layerCount_ ) && VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresourceLayers const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresourceLayers const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSubresourceLayers *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( aspectMask, mipLevel, baseArrayLayer, layerCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
auto operator<=>( ImageSubresourceLayers const & ) const = default; #else bool operator==( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( aspectMask == rhs.aspectMask ) && ( mipLevel == rhs.mipLevel ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); # endif } bool operator!=( ImageSubresourceLayers const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageAspectFlags aspectMask = {}; uint32_t mipLevel = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSubresourceLayers; }; #endif // wrapper struct for struct VkImageBlit2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit2.html struct ImageBlit2 { using NativeType = VkImageBlit2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageSubresourceLayers srcSubresource_ = {}, std::array const & srcOffsets_ = {}, ImageSubresourceLayers dstSubresource_ = {}, std::array const & dstOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcSubresource{ srcSubresource_ } , srcOffsets{ srcOffsets_ } , dstSubresource{ dstSubresource_ } , dstOffsets{ dstOffsets_ } { } VULKAN_HPP_CONSTEXPR_14 ImageBlit2( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageBlit2( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit2( *reinterpret_cast( &rhs ) ) {} ImageBlit2 & operator=( ImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageBlit2 & operator=( VkImageBlit2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return 
// Generated Vulkan-Hpp code — ImageBlit2 wrapper (continued): fluent lvalue/rvalue
// setters for pNext/srcSubresource/srcOffsets/dstSubresource/dstOffsets, VkImageBlit2
// conversion operators, reflect() tuple, ==/!= fallback, data members, CppType
// specializations, and the KHR alias; then the opening of BlitImageInfo2.
// NOTE(review): angle-bracket template arguments are stripped throughout (e.g.
// "std::array const &" and "reinterpret_cast( this )" lack their type lists) — not
// compilable as-is; regenerate this header from the Vulkan XML registry.
*this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setSrcOffsets( std::array const & srcOffsets_ ) & VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 && setSrcOffsets( std::array const & srcOffsets_ ) && VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 & setDstOffsets( std::array const & dstOffsets_ ) & VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit2 && setDstOffsets( std::array const & dstOffsets_ ) && VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageBlit2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageBlit2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageBlit2 const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageBlit2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ImageSubresourceLayers const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcSubresource, srcOffsets, dstSubresource, dstOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageBlit2 const & ) const = default; #else bool operator==( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); # endif } bool operator!=( ImageBlit2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageBlit2; const void * pNext = {}; ImageSubresourceLayers srcSubresource = {}; ArrayWrapper1D srcOffsets = {}; ImageSubresourceLayers dstSubresource = {}; ArrayWrapper1D dstOffsets = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageBlit2; }; #endif template <> struct CppType { using Type = ImageBlit2; }; using ImageBlit2KHR = ImageBlit2; // wrapper struct for struct VkBlitImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBlitImageInfo2.html struct BlitImageInfo2 { using NativeType = VkBlitImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( Image srcImage_ = {}, ImageLayout srcImageLayout_ = ImageLayout::eUndefined, Image dstImage_ = {}, ImageLayout 
// Generated Vulkan-Hpp code — BlitImageInfo2 wrapper (continued): tail of the
// element-wise constructor, the enhanced-mode ArrayProxy constructor (derives
// regionCount/pRegions from the proxy), assignment from VkBlitImageInfo2, fluent
// setters, setRegions convenience (enhanced mode), conversion operators, reflect(),
// ==/!= fallback (pointer compare on pRegions, not deep), members, and CppType.
// NOTE(review): template-argument lists are stripped here too (e.g.
// "static_cast( regions_.size() )", "ArrayProxyNoTemporaries const &") — not
// compilable as-is; regenerate from the Vulkan XML registry.
dstImageLayout_ = ImageLayout::eUndefined, uint32_t regionCount_ = {}, const ImageBlit2 * pRegions_ = {}, Filter filter_ = Filter::eNearest, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcImage{ srcImage_ } , srcImageLayout{ srcImageLayout_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } , filter{ filter_ } { } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BlitImageInfo2( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BlitImageInfo2( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BlitImageInfo2( Image srcImage_, ImageLayout srcImageLayout_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & regions_, Filter filter_ = Filter::eNearest, const void * pNext_ = nullptr ) : pNext( pNext_ ) , srcImage( srcImage_ ) , srcImageLayout( srcImageLayout_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) , filter( filter_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BlitImageInfo2 & operator=( BlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BlitImageInfo2 & operator=( VkBlitImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImage( Image srcImage_ ) & VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setSrcImage( Image srcImage_ ) && 
VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) & VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setSrcImageLayout( ImageLayout srcImageLayout_ ) && VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setPRegions( const ImageBlit2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setPRegions( const ImageBlit2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BlitImageInfo2 & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 & setFilter( 
Filter filter_ ) & VULKAN_HPP_NOEXCEPT { filter = filter_; return *this; } VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2 && setFilter( Filter filter_ ) && VULKAN_HPP_NOEXCEPT { filter = filter_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBlitImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBlitImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBlitImageInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBlitImageInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BlitImageInfo2 const & ) const = default; #else bool operator==( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ) && ( filter == rhs.filter ); # endif } bool operator!=( BlitImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBlitImageInfo2; const void * pNext = {}; Image srcImage = {}; ImageLayout srcImageLayout = ImageLayout::eUndefined; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const ImageBlit2 * pRegions = {}; Filter filter = Filter::eNearest; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BlitImageInfo2; }; #endif template <> struct 
// Generated Vulkan-Hpp code — tail of BlitImageInfo2's CppType specialization and the
// BlitImageInfo2KHR alias, then the full BufferCaptureDescriptorDataInfoEXT wrapper
// (VkBufferCaptureDescriptorDataInfoEXT, VK_EXT_descriptor_buffer): constructors,
// assignment, pNext/buffer setters, conversion operators, reflect(), ==/!= fallback,
// members, CppType; ends with the opening of the FUCHSIA-gated
// BufferCollectionBufferCreateInfoFUCHSIA wrapper.
// NOTE(review): stripped template-argument lists throughout (e.g.
// "reinterpret_cast( &rhs )", "std::tuple reflect()") — not compilable as-is;
// regenerate this header instead of hand-editing.
CppType { using Type = BlitImageInfo2; }; using BlitImageInfo2KHR = BlitImageInfo2; // wrapper struct for struct VkBufferCaptureDescriptorDataInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCaptureDescriptorDataInfoEXT.html struct BufferCaptureDescriptorDataInfoEXT { using NativeType = VkBufferCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCaptureDescriptorDataInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCaptureDescriptorDataInfoEXT( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } BufferCaptureDescriptorDataInfoEXT & operator=( BufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCaptureDescriptorDataInfoEXT & operator=( VkBufferCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 BufferCaptureDescriptorDataInfoEXT && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCaptureDescriptorDataInfoEXT const & ) const = default; #else bool operator==( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( BufferCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferCaptureDescriptorDataInfoEXT; const void * pNext = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCaptureDescriptorDataInfoEXT; }; #endif template <> struct CppType { using Type = BufferCaptureDescriptorDataInfoEXT; }; #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferCollectionBufferCreateInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionBufferCreateInfoFUCHSIA.html struct BufferCollectionBufferCreateInfoFUCHSIA { using NativeType = VkBufferCollectionBufferCreateInfoFUCHSIA; static 
// Generated Vulkan-Hpp code — BufferCollectionBufferCreateInfoFUCHSIA wrapper
// (continued; VK_USE_PLATFORM_FUCHSIA only, note the extra indent on '# if'
// directives inside the platform guard): constructors, assignment, fluent setters for
// pNext/collection/index, conversion operators, reflect(), ==/!= fallback, members,
// and CppType specializations; closes the VK_USE_PLATFORM_FUCHSIA guard at the end.
// NOTE(review): stripped template-argument lists — not compilable as-is; regenerate.
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , collection{ collection_ } , index{ index_ } { } VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCollectionBufferCreateInfoFUCHSIA( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCollectionBufferCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } BufferCollectionBufferCreateInfoFUCHSIA & operator=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionBufferCreateInfoFUCHSIA & operator=( VkBufferCollectionBufferCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) & VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA && setCollection( BufferCollectionFUCHSIA collection_ ) && VULKAN_HPP_NOEXCEPT { collection = collection_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA & setIndex( uint32_t index_ ) & VULKAN_HPP_NOEXCEPT { index = index_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionBufferCreateInfoFUCHSIA && setIndex( uint32_t index_ ) && VULKAN_HPP_NOEXCEPT { index = index_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionBufferCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionBufferCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionBufferCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCollectionBufferCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, collection, index ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCollectionBufferCreateInfoFUCHSIA const & ) const = default; # else bool operator==( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); # endif } bool operator!=( BufferCollectionBufferCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eBufferCollectionBufferCreateInfoFUCHSIA; const void * pNext = {}; BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCollectionBufferCreateInfoFUCHSIA; }; # endif template <> struct CppType { using Type = BufferCollectionBufferCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if 
// Generated Vulkan-Hpp code — BufferCollectionConstraintsInfoFUCHSIA wrapper
// (VK_USE_PLATFORM_FUCHSIA only): five uint32_t buffer-count constraints with
// constructors, assignment, per-field lvalue/rvalue setters, conversion operators,
// reflect(), and the start of the ==/!= fallback (comparison tail continues on the
// following lines).
// NOTE(review): stripped template-argument lists — not compilable as-is; regenerate
// from the Vulkan XML registry rather than patching by hand.
defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferCollectionConstraintsInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionConstraintsInfoFUCHSIA.html struct BufferCollectionConstraintsInfoFUCHSIA { using NativeType = VkBufferCollectionConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( uint32_t minBufferCount_ = {}, uint32_t maxBufferCount_ = {}, uint32_t minBufferCountForCamping_ = {}, uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minBufferCount{ minBufferCount_ } , maxBufferCount{ maxBufferCount_ } , minBufferCountForCamping{ minBufferCountForCamping_ } , minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ } , minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ } { } VULKAN_HPP_CONSTEXPR BufferCollectionConstraintsInfoFUCHSIA( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCollectionConstraintsInfoFUCHSIA( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCollectionConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } BufferCollectionConstraintsInfoFUCHSIA & operator=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionConstraintsInfoFUCHSIA & operator=( VkBufferCollectionConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
BufferCollectionConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCount( uint32_t minBufferCount_ ) & VULKAN_HPP_NOEXCEPT { minBufferCount = minBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setMinBufferCount( uint32_t minBufferCount_ ) && VULKAN_HPP_NOEXCEPT { minBufferCount = minBufferCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMaxBufferCount( uint32_t maxBufferCount_ ) & VULKAN_HPP_NOEXCEPT { maxBufferCount = maxBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setMaxBufferCount( uint32_t maxBufferCount_ ) && VULKAN_HPP_NOEXCEPT { maxBufferCount = maxBufferCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) & VULKAN_HPP_NOEXCEPT { minBufferCountForCamping = minBufferCountForCamping_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setMinBufferCountForCamping( uint32_t minBufferCountForCamping_ ) && VULKAN_HPP_NOEXCEPT { minBufferCountForCamping = minBufferCountForCamping_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) & VULKAN_HPP_NOEXCEPT { minBufferCountForDedicatedSlack = minBufferCountForDedicatedSlack_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setMinBufferCountForDedicatedSlack( uint32_t minBufferCountForDedicatedSlack_ ) && VULKAN_HPP_NOEXCEPT { minBufferCountForDedicatedSlack = 
minBufferCountForDedicatedSlack_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA & setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) & VULKAN_HPP_NOEXCEPT { minBufferCountForSharedSlack = minBufferCountForSharedSlack_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionConstraintsInfoFUCHSIA && setMinBufferCountForSharedSlack( uint32_t minBufferCountForSharedSlack_ ) && VULKAN_HPP_NOEXCEPT { minBufferCountForSharedSlack = minBufferCountForSharedSlack_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCollectionConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minBufferCount, maxBufferCount, minBufferCountForCamping, minBufferCountForDedicatedSlack, minBufferCountForSharedSlack ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCollectionConstraintsInfoFUCHSIA const & ) const = default; # else bool operator==( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minBufferCount == rhs.minBufferCount ) && ( maxBufferCount == rhs.maxBufferCount ) && ( minBufferCountForCamping == rhs.minBufferCountForCamping ) && ( minBufferCountForDedicatedSlack == rhs.minBufferCountForDedicatedSlack ) && ( minBufferCountForSharedSlack == rhs.minBufferCountForSharedSlack 
// Generated Vulkan-Hpp code — tail of BufferCollectionConstraintsInfoFUCHSIA
// (operator!=, members, CppType), then BufferCollectionCreateInfoFUCHSIA
// (VkBufferCollectionCreateInfoFUCHSIA). Unlike its siblings, this struct's <=> is
// hand-expanded: zx_handle_t has no usable operator<=>, so the generator emits a
// std::strong_ordering chain using memcmp on collectionToken, and operator==/!= are
// emitted unconditionally (also memcmp-based). Ends with the opening of
// BufferCollectionImageCreateInfoFUCHSIA.
// NOTE(review): stripped template-argument lists — not compilable as-is; regenerate.
); # endif } bool operator!=( BufferCollectionConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eBufferCollectionConstraintsInfoFUCHSIA; const void * pNext = {}; uint32_t minBufferCount = {}; uint32_t maxBufferCount = {}; uint32_t minBufferCountForCamping = {}; uint32_t minBufferCountForDedicatedSlack = {}; uint32_t minBufferCountForSharedSlack = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCollectionConstraintsInfoFUCHSIA; }; # endif template <> struct CppType { using Type = BufferCollectionConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferCollectionCreateInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionCreateInfoFUCHSIA.html struct BufferCollectionCreateInfoFUCHSIA { using NativeType = VkBufferCollectionCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionCreateInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , collectionToken{ collectionToken_ } { } VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCollectionCreateInfoFUCHSIA( VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCollectionCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } BufferCollectionCreateInfoFUCHSIA & operator=( BufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionCreateInfoFUCHSIA & operator=( 
VkBufferCollectionCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA & setCollectionToken( zx_handle_t collectionToken_ ) & VULKAN_HPP_NOEXCEPT { collectionToken = collectionToken_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionCreateInfoFUCHSIA && setCollectionToken( zx_handle_t collectionToken_ ) && VULKAN_HPP_NOEXCEPT { collectionToken = collectionToken_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCollectionCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, collectionToken ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &collectionToken, &rhs.collectionToken, sizeof( zx_handle_t ) ) == 0 ); } bool operator!=( BufferCollectionCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eBufferCollectionCreateInfoFUCHSIA; const void * pNext = {}; zx_handle_t collectionToken = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCollectionCreateInfoFUCHSIA; }; # endif template <> struct CppType { using Type = BufferCollectionCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferCollectionImageCreateInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionImageCreateInfoFUCHSIA.html struct BufferCollectionImageCreateInfoFUCHSIA { using NativeType = VkBufferCollectionImageCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , collection{ collection_ } , index{ index_ } { } VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCollectionImageCreateInfoFUCHSIA( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCollectionImageCreateInfoFUCHSIA( 
// Generated Vulkan-Hpp code — BufferCollectionImageCreateInfoFUCHSIA wrapper
// (continued): assignment, pNext/collection/index setters, conversion operators,
// reflect(), ==/!= fallback, members, CppType; then the opening of the
// SysmemColorSpaceFUCHSIA wrapper (VkSysmemColorSpaceFUCHSIA).
// NOTE(review): stripped template-argument lists — not compilable as-is; regenerate
// this header from the Vulkan XML registry.
*reinterpret_cast( &rhs ) ) { } BufferCollectionImageCreateInfoFUCHSIA & operator=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionImageCreateInfoFUCHSIA & operator=( VkBufferCollectionImageCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) & VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA && setCollection( BufferCollectionFUCHSIA collection_ ) && VULKAN_HPP_NOEXCEPT { collection = collection_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA & setIndex( uint32_t index_ ) & VULKAN_HPP_NOEXCEPT { index = index_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCollectionImageCreateInfoFUCHSIA && setIndex( uint32_t index_ ) && VULKAN_HPP_NOEXCEPT { index = index_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCollectionImageCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionImageCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionImageCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCollectionImageCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if 
defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, collection, index ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCollectionImageCreateInfoFUCHSIA const & ) const = default; # else bool operator==( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); # endif } bool operator!=( BufferCollectionImageCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eBufferCollectionImageCreateInfoFUCHSIA; const void * pNext = {}; BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCollectionImageCreateInfoFUCHSIA; }; # endif template <> struct CppType { using Type = BufferCollectionImageCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkSysmemColorSpaceFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSysmemColorSpaceFUCHSIA.html struct SysmemColorSpaceFUCHSIA { using NativeType = VkSysmemColorSpaceFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSysmemColorSpaceFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorSpace{ colorSpace_ } { } VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; SysmemColorSpaceFUCHSIA( 
VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : SysmemColorSpaceFUCHSIA( *reinterpret_cast( &rhs ) ) { } SysmemColorSpaceFUCHSIA & operator=( SysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SysmemColorSpaceFUCHSIA & operator=( VkSysmemColorSpaceFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA & setColorSpace( uint32_t colorSpace_ ) & VULKAN_HPP_NOEXCEPT { colorSpace = colorSpace_; return *this; } VULKAN_HPP_CONSTEXPR_14 SysmemColorSpaceFUCHSIA && setColorSpace( uint32_t colorSpace_ ) && VULKAN_HPP_NOEXCEPT { colorSpace = colorSpace_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSysmemColorSpaceFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSysmemColorSpaceFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSysmemColorSpaceFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSysmemColorSpaceFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, colorSpace ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SysmemColorSpaceFUCHSIA const & ) const = default; # else bool operator==( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorSpace == rhs.colorSpace ); # endif } bool operator!=( SysmemColorSpaceFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eSysmemColorSpaceFUCHSIA; const void * pNext = {}; uint32_t colorSpace = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SysmemColorSpaceFUCHSIA; }; # endif template <> struct CppType { using Type = SysmemColorSpaceFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferCollectionPropertiesFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCollectionPropertiesFUCHSIA.html struct BufferCollectionPropertiesFUCHSIA { using NativeType = VkBufferCollectionPropertiesFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCollectionPropertiesFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, uint32_t bufferCount_ = {}, uint32_t createInfoIndex_ = {}, uint64_t sysmemPixelFormat_ = {}, FormatFeatureFlags formatFeatures_ = {}, SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex_ = {}, ComponentMapping samplerYcbcrConversionComponents_ = {}, SamplerYcbcrModelConversion suggestedYcbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange suggestedYcbcrRange_ = SamplerYcbcrRange::eItuFull, ChromaLocation suggestedXChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation suggestedYChromaOffset_ = ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryTypeBits{ memoryTypeBits_ } , bufferCount{ bufferCount_ } , createInfoIndex{ createInfoIndex_ } , sysmemPixelFormat{ sysmemPixelFormat_ } , formatFeatures{ 
formatFeatures_ } , sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ } , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } , suggestedYcbcrModel{ suggestedYcbcrModel_ } , suggestedYcbcrRange{ suggestedYcbcrRange_ } , suggestedXChromaOffset{ suggestedXChromaOffset_ } , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } VULKAN_HPP_CONSTEXPR BufferCollectionPropertiesFUCHSIA( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCollectionPropertiesFUCHSIA( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCollectionPropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) { } BufferCollectionPropertiesFUCHSIA & operator=( BufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCollectionPropertiesFUCHSIA & operator=( VkBufferCollectionPropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkBufferCollectionPropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionPropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCollectionPropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCollectionPropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits, bufferCount, createInfoIndex, sysmemPixelFormat, formatFeatures, sysmemColorSpaceIndex, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCollectionPropertiesFUCHSIA const & ) const = default; # else bool operator==( BufferCollectionPropertiesFUCHSIA const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ) && ( bufferCount == rhs.bufferCount ) && ( createInfoIndex == rhs.createInfoIndex ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( formatFeatures == rhs.formatFeatures ) && ( sysmemColorSpaceIndex == rhs.sysmemColorSpaceIndex ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); # endif } bool operator!=( BufferCollectionPropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eBufferCollectionPropertiesFUCHSIA; void * pNext = {}; uint32_t memoryTypeBits = {}; uint32_t bufferCount = {}; uint32_t createInfoIndex = {}; uint64_t sysmemPixelFormat = {}; FormatFeatureFlags formatFeatures = {}; SysmemColorSpaceFUCHSIA sysmemColorSpaceIndex = {}; ComponentMapping samplerYcbcrConversionComponents = {}; SamplerYcbcrModelConversion suggestedYcbcrModel = SamplerYcbcrModelConversion::eRgbIdentity; SamplerYcbcrRange suggestedYcbcrRange = SamplerYcbcrRange::eItuFull; ChromaLocation suggestedXChromaOffset = ChromaLocation::eCositedEven; ChromaLocation suggestedYChromaOffset = ChromaLocation::eCositedEven; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCollectionPropertiesFUCHSIA; }; # endif template <> struct CppType { using Type = BufferCollectionPropertiesFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkBufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCreateInfo.html struct BufferCreateInfo { using NativeType = 
VkBufferCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateFlags flags_ = {}, DeviceSize size_ = {}, BufferUsageFlags usage_ = {}, SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , size{ size_ } , usage{ usage_ } , sharingMode{ sharingMode_ } , queueFamilyIndexCount{ queueFamilyIndexCount_ } , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCreateInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BufferCreateInfo( BufferCreateFlags flags_, DeviceSize size_, BufferUsageFlags usage_, SharingMode sharingMode_, ArrayProxyNoTemporaries const & queueFamilyIndices_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , size( size_ ) , usage( usage_ ) , sharingMode( sharingMode_ ) , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) , pQueueFamilyIndices( queueFamilyIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setFlags( BufferCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setFlags( BufferCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setUsage( BufferUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setUsage( BufferUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setSharingMode( SharingMode sharingMode_ ) & VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setSharingMode( SharingMode sharingMode_ ) && VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) & VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo && setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ 
) && VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) BufferCreateInfo & setQueueFamilyIndices( ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCreateInfo const & ) const = default; #else bool operator==( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( size == rhs.size ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); # endif } bool operator!=( BufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferCreateInfo; const void * pNext = {}; BufferCreateFlags flags = {}; DeviceSize size = {}; BufferUsageFlags usage = {}; SharingMode sharingMode = SharingMode::eExclusive; uint32_t 
queueFamilyIndexCount = {}; const uint32_t * pQueueFamilyIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCreateInfo; }; #endif template <> struct CppType { using Type = BufferCreateInfo; }; #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkBufferConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferConstraintsInfoFUCHSIA.html struct BufferConstraintsInfoFUCHSIA { using NativeType = VkBufferConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferConstraintsInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferCreateInfo createInfo_ = {}, FormatFeatureFlags requiredFormatFeatures_ = {}, BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , createInfo{ createInfo_ } , requiredFormatFeatures{ requiredFormatFeatures_ } , bufferCollectionConstraints{ bufferCollectionConstraints_ } { } VULKAN_HPP_CONSTEXPR BufferConstraintsInfoFUCHSIA( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferConstraintsInfoFUCHSIA( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : BufferConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } BufferConstraintsInfoFUCHSIA & operator=( BufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferConstraintsInfoFUCHSIA & operator=( VkBufferConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext 
= pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setCreateInfo( BufferCreateInfo const & createInfo_ ) & VULKAN_HPP_NOEXCEPT { createInfo = createInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA && setCreateInfo( BufferCreateInfo const & createInfo_ ) && VULKAN_HPP_NOEXCEPT { createInfo = createInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setRequiredFormatFeatures( FormatFeatureFlags requiredFormatFeatures_ ) & VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA && setRequiredFormatFeatures( FormatFeatureFlags requiredFormatFeatures_ ) && VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA & setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) & VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferConstraintsInfoFUCHSIA && setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) && VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( 
this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, createInfo, requiredFormatFeatures, bufferCollectionConstraints ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferConstraintsInfoFUCHSIA const & ) const = default; # else bool operator==( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createInfo == rhs.createInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ); # endif } bool operator!=( BufferConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eBufferConstraintsInfoFUCHSIA; const void * pNext = {}; BufferCreateInfo createInfo = {}; FormatFeatureFlags requiredFormatFeatures = {}; BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferConstraintsInfoFUCHSIA; }; # endif template <> struct CppType { using Type = BufferConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkBufferCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy.html struct BufferCopy { using NativeType = VkBufferCopy; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCopy( DeviceSize srcOffset_ = {}, DeviceSize dstOffset_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : srcOffset{ srcOffset_ } , dstOffset{ dstOffset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy( 
*reinterpret_cast( &rhs ) ) {} BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSrcOffset( DeviceSize srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy && setSrcOffset( DeviceSize srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCopy & setDstOffset( DeviceSize dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy && setDstOffset( DeviceSize dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCopy & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCopy const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCopy *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcOffset, dstOffset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCopy const & ) const = default; #else bool operator==( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); # endif } bool operator!=( BufferCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize srcOffset = {}; DeviceSize dstOffset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferCopy; }; #endif // wrapper struct for struct VkBufferCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferCopy2.html struct BufferCopy2 { using NativeType = VkBufferCopy2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCopy2( DeviceSize srcOffset_ = {}, DeviceSize dstOffset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcOffset{ srcOffset_ } , dstOffset{ dstOffset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferCopy2( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferCopy2( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferCopy2( *reinterpret_cast( &rhs ) ) {} BufferCopy2 & operator=( BufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferCopy2 & operator=( VkBufferCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSrcOffset( DeviceSize srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 BufferCopy2 && setSrcOffset( DeviceSize srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setDstOffset( DeviceSize dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 && setDstOffset( DeviceSize dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferCopy2 && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferCopy2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferCopy2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcOffset, dstOffset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferCopy2 const & ) const = default; #else bool operator==( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcOffset == rhs.srcOffset ) && ( dstOffset == rhs.dstOffset ) && ( size == rhs.size ); # endif } bool operator!=( BufferCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferCopy2; const void * pNext = {}; DeviceSize srcOffset = {}; DeviceSize dstOffset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
<> struct CppType { using Type = BufferCopy2; }; #endif template <> struct CppType { using Type = BufferCopy2; }; using BufferCopy2KHR = BufferCopy2; // wrapper struct for struct VkBufferDeviceAddressCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressCreateInfoEXT.html struct BufferDeviceAddressCreateInfoEXT { using NativeType = VkBufferDeviceAddressCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceAddress{ deviceAddress_ } { } VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & setDeviceAddress( DeviceAddress deviceAddress_ ) 
& VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferDeviceAddressCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferDeviceAddressCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferDeviceAddressCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferDeviceAddressCreateInfoEXT const & ) const = default; #else bool operator==( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ); # endif } bool operator!=( BufferDeviceAddressCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT; const void * pNext = {}; DeviceAddress deviceAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferDeviceAddressCreateInfoEXT; }; #endif template <> struct CppType { using Type = BufferDeviceAddressCreateInfoEXT; }; // wrapper struct for struct VkBufferDeviceAddressInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferDeviceAddressInfo.html struct BufferDeviceAddressInfo { using NativeType = 
VkBufferDeviceAddressInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferDeviceAddressInfo( *reinterpret_cast( &rhs ) ) { } BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferDeviceAddressInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferDeviceAddressInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkBufferDeviceAddressInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferDeviceAddressInfo const & ) const = default; #else bool operator==( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( BufferDeviceAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferDeviceAddressInfo; const void * pNext = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferDeviceAddressInfo; }; #endif template <> struct CppType { using Type = BufferDeviceAddressInfo; }; using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo; using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo; // wrapper struct for struct VkBufferImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy.html struct BufferImageCopy { using NativeType = VkBufferImageCopy; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferImageCopy( DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, ImageSubresourceLayers imageSubresource_ = {}, Offset3D imageOffset_ = {}, Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT : bufferOffset{ bufferOffset_ } , bufferRowLength{ bufferRowLength_ } , bufferImageHeight{ bufferImageHeight_ } , imageSubresource{ imageSubresource_ } , imageOffset{ imageOffset_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy( *reinterpret_cast( &rhs ) ) {} BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferOffset( DeviceSize bufferOffset_ ) & VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setBufferOffset( DeviceSize bufferOffset_ ) && VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) & VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setBufferRowLength( uint32_t bufferRowLength_ ) && VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) & VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setBufferImageHeight( uint32_t bufferImageHeight_ ) && VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) & VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageOffset( Offset3D 
const & imageOffset_ ) & VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setImageOffset( Offset3D const & imageOffset_ ) && VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & setImageExtent( Extent3D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy && setImageExtent( Extent3D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferImageCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferImageCopy const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferImageCopy *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferImageCopy const & ) const = default; #else bool operator==( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( BufferImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize bufferOffset = {}; uint32_t bufferRowLength = {}; uint32_t bufferImageHeight = {}; 
ImageSubresourceLayers imageSubresource = {}; Offset3D imageOffset = {}; Extent3D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferImageCopy; }; #endif // wrapper struct for struct VkBufferImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferImageCopy2.html struct BufferImageCopy2 { using NativeType = VkBufferImageCopy2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferImageCopy2( DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, ImageSubresourceLayers imageSubresource_ = {}, Offset3D imageOffset_ = {}, Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , bufferOffset{ bufferOffset_ } , bufferRowLength{ bufferRowLength_ } , bufferImageHeight{ bufferImageHeight_ } , imageSubresource{ imageSubresource_ } , imageOffset{ imageOffset_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR BufferImageCopy2( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferImageCopy2( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferImageCopy2( *reinterpret_cast( &rhs ) ) {} BufferImageCopy2 & operator=( BufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferImageCopy2 & operator=( VkBufferImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferOffset( DeviceSize bufferOffset_ ) & VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setBufferOffset( DeviceSize bufferOffset_ ) && VULKAN_HPP_NOEXCEPT { bufferOffset = bufferOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferRowLength( uint32_t bufferRowLength_ ) & VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setBufferRowLength( uint32_t bufferRowLength_ ) && VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setBufferImageHeight( uint32_t bufferImageHeight_ ) & VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setBufferImageHeight( uint32_t bufferImageHeight_ ) && VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) & VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageOffset( Offset3D const & imageOffset_ ) & VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 && setImageOffset( Offset3D const & imageOffset_ ) && VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 & setImageExtent( Extent3D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2 
&& setImageExtent( Extent3D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferImageCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferImageCopy2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferImageCopy2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, bufferOffset, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferImageCopy2 const & ) const = default; #else bool operator==( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferOffset == rhs.bufferOffset ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( BufferImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferImageCopy2; const void * pNext = {}; DeviceSize bufferOffset = {}; uint32_t bufferRowLength = {}; uint32_t bufferImageHeight = {}; ImageSubresourceLayers imageSubresource = {}; Offset3D imageOffset = {}; Extent3D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferImageCopy2; }; #endif template <> struct CppType { using Type = BufferImageCopy2; }; using BufferImageCopy2KHR = BufferImageCopy2; // wrapper 
struct for struct VkBufferMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier.html struct BufferMemoryBarrier { using NativeType = VkBufferMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( AccessFlags srcAccessMask_ = {}, AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcAccessMask{ srcAccessMask_ } , dstAccessMask{ dstAccessMask_ } , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } , buffer{ buffer_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier( *reinterpret_cast( &rhs ) ) { } BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = 
srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setSrcAccessMask( AccessFlags srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setDstAccessMask( AccessFlags dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size 
= size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferMemoryBarrier *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcAccessMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferMemoryBarrier const & ) const = default; #else bool operator==( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( BufferMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferMemoryBarrier; const void * pNext = {}; AccessFlags srcAccessMask = {}; AccessFlags dstAccessMask = {}; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; Buffer buffer = {}; DeviceSize offset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferMemoryBarrier; }; #endif template <> struct CppType { using Type 
= BufferMemoryBarrier; }; // wrapper struct for struct VkBufferMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryBarrier2.html struct BufferMemoryBarrier2 { using NativeType = VkBufferMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( PipelineStageFlags2 srcStageMask_ = {}, AccessFlags2 srcAccessMask_ = {}, PipelineStageFlags2 dstStageMask_ = {}, AccessFlags2 dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcStageMask{ srcStageMask_ } , srcAccessMask{ srcAccessMask_ } , dstStageMask{ dstStageMask_ } , dstAccessMask{ dstAccessMask_ } , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } , buffer{ buffer_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR BufferMemoryBarrier2( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferMemoryBarrier2( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryBarrier2( *reinterpret_cast( &rhs ) ) { } BufferMemoryBarrier2 & operator=( BufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryBarrier2 & operator=( VkBufferMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setPNext( const void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) & VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) && VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setSrcAccessMask( AccessFlags2 srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) & VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setDstStageMask( PipelineStageFlags2 dstStageMask_ ) && VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setDstAccessMask( AccessFlags2 dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { 
dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier2 && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, srcQueueFamilyIndex, dstQueueFamilyIndex, buffer, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferMemoryBarrier2 const & ) const = default; #else bool operator==( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( BufferMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferMemoryBarrier2; const void * pNext = {}; PipelineStageFlags2 srcStageMask = {}; AccessFlags2 srcAccessMask = {}; PipelineStageFlags2 dstStageMask = {}; AccessFlags2 dstAccessMask = {}; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; Buffer buffer = {}; DeviceSize offset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferMemoryBarrier2; }; #endif template <> struct CppType { using Type = BufferMemoryBarrier2; }; using BufferMemoryBarrier2KHR = BufferMemoryBarrier2; // wrapper struct for struct VkBufferMemoryRequirementsInfo2, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferMemoryRequirementsInfo2.html struct BufferMemoryRequirementsInfo2 { using NativeType = VkBufferMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : BufferMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) { } BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferMemoryRequirementsInfo2 const & ) const = default; #else bool operator==( BufferMemoryRequirementsInfo2 const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( BufferMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferMemoryRequirementsInfo2; const void * pNext = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferMemoryRequirementsInfo2; }; #endif template <> struct CppType { using Type = BufferMemoryRequirementsInfo2; }; using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; // wrapper struct for struct VkBufferOpaqueCaptureAddressCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferOpaqueCaptureAddressCreateInfo.html struct BufferOpaqueCaptureAddressCreateInfo { using NativeType = VkBufferOpaqueCaptureAddressCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , opaqueCaptureAddress{ opaqueCaptureAddress_ } { } VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast( &rhs ) ) { } BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) & VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo && setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) && VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferOpaqueCaptureAddressCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferOpaqueCaptureAddressCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferOpaqueCaptureAddressCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, opaqueCaptureAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const & ) const = default; #else bool operator==( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) 
&& ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); # endif } bool operator!=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo; const void * pNext = {}; uint64_t opaqueCaptureAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferOpaqueCaptureAddressCreateInfo; }; #endif template <> struct CppType { using Type = BufferOpaqueCaptureAddressCreateInfo; }; using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; // wrapper struct for struct VkBufferUsageFlags2CreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferUsageFlags2CreateInfo.html struct BufferUsageFlags2CreateInfo { using NativeType = VkBufferUsageFlags2CreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , usage{ usage_ } { } VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferUsageFlags2CreateInfo( *reinterpret_cast( &rhs ) ) { } BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( BufferUsageFlags2 usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo && setUsage( BufferUsageFlags2 usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferUsageFlags2CreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferUsageFlags2CreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default; #else bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); # endif } bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferUsageFlags2CreateInfo; const void * pNext = {}; BufferUsageFlags2 usage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferUsageFlags2CreateInfo; }; #endif template <> struct CppType { 
using Type = BufferUsageFlags2CreateInfo; }; using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; // wrapper struct for struct VkBufferViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkBufferViewCreateInfo.html struct BufferViewCreateInfo { using NativeType = VkBufferViewCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateFlags flags_ = {}, Buffer buffer_ = {}, Format format_ = Format::eUndefined, DeviceSize offset_ = {}, DeviceSize range_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , buffer{ buffer_ } , format{ format_ } , offset{ offset_ } , range{ range_ } { } VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : BufferViewCreateInfo( *reinterpret_cast( &rhs ) ) { } BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFlags( BufferViewCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BufferViewCreateInfo && setFlags( BufferViewCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & setRange( DeviceSize range_ ) & VULKAN_HPP_NOEXCEPT { range = range_; return *this; } VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo && setRange( DeviceSize range_ ) && VULKAN_HPP_NOEXCEPT { range = range_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBufferViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBufferViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBufferViewCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, buffer, format, offset, range ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BufferViewCreateInfo const & ) const = default; #else bool 
operator==( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( buffer == rhs.buffer ) && ( format == rhs.format ) && ( offset == rhs.offset ) && ( range == rhs.range ); # endif } bool operator!=( BufferViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eBufferViewCreateInfo; const void * pNext = {}; BufferViewCreateFlags flags = {}; Buffer buffer = {}; Format format = Format::eUndefined; DeviceSize offset = {}; DeviceSize range = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BufferViewCreateInfo; }; #endif template <> struct CppType { using Type = BufferViewCreateInfo; }; // wrapper struct for struct VkStridedDeviceAddressNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressNV.html struct StridedDeviceAddressNV { using NativeType = VkStridedDeviceAddressNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( DeviceAddress startAddress_ = {}, DeviceSize strideInBytes_ = {} ) VULKAN_HPP_NOEXCEPT : startAddress{ startAddress_ } , strideInBytes{ strideInBytes_ } { } VULKAN_HPP_CONSTEXPR StridedDeviceAddressNV( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; StridedDeviceAddressNV( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT : StridedDeviceAddressNV( *reinterpret_cast( &rhs ) ) { } StridedDeviceAddressNV & operator=( StridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ StridedDeviceAddressNV & operator=( VkStridedDeviceAddressNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStartAddress( DeviceAddress startAddress_ ) & VULKAN_HPP_NOEXCEPT { startAddress = startAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV && setStartAddress( DeviceAddress startAddress_ ) && VULKAN_HPP_NOEXCEPT { startAddress = startAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV & setStrideInBytes( DeviceSize strideInBytes_ ) & VULKAN_HPP_NOEXCEPT { strideInBytes = strideInBytes_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressNV && setStrideInBytes( DeviceSize strideInBytes_ ) && VULKAN_HPP_NOEXCEPT { strideInBytes = strideInBytes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkStridedDeviceAddressNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkStridedDeviceAddressNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( startAddress, strideInBytes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( StridedDeviceAddressNV const & ) const = default; #else bool operator==( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( startAddress == rhs.startAddress ) && ( strideInBytes == rhs.strideInBytes ); # endif } bool operator!=( StridedDeviceAddressNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress startAddress = {}; DeviceSize strideInBytes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
StridedDeviceAddressNV; }; #endif // wrapper struct for struct VkBuildPartitionedAccelerationStructureIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureIndirectCommandNV.html struct BuildPartitionedAccelerationStructureIndirectCommandNV { using NativeType = VkBuildPartitionedAccelerationStructureIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( PartitionedAccelerationStructureOpTypeNV opType_ = PartitionedAccelerationStructureOpTypeNV::eWriteInstance, uint32_t argCount_ = {}, StridedDeviceAddressNV argData_ = {} ) VULKAN_HPP_NOEXCEPT : opType{ opType_ } , argCount{ argCount_ } , argData{ argData_ } { } VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureIndirectCommandNV( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BuildPartitionedAccelerationStructureIndirectCommandNV( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : BuildPartitionedAccelerationStructureIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } BuildPartitionedAccelerationStructureIndirectCommandNV & operator=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BuildPartitionedAccelerationStructureIndirectCommandNV & operator=( VkBuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setOpType( PartitionedAccelerationStructureOpTypeNV opType_ ) & VULKAN_HPP_NOEXCEPT { opType = opType_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BuildPartitionedAccelerationStructureIndirectCommandNV && setOpType( PartitionedAccelerationStructureOpTypeNV opType_ ) && VULKAN_HPP_NOEXCEPT { opType = opType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgCount( uint32_t argCount_ ) & VULKAN_HPP_NOEXCEPT { argCount = argCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV && setArgCount( uint32_t argCount_ ) && VULKAN_HPP_NOEXCEPT { argCount = argCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV & setArgData( StridedDeviceAddressNV const & argData_ ) & VULKAN_HPP_NOEXCEPT { argData = argData_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureIndirectCommandNV && setArgData( StridedDeviceAddressNV const & argData_ ) && VULKAN_HPP_NOEXCEPT { argData = argData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( opType, argCount, argData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BuildPartitionedAccelerationStructureIndirectCommandNV const & ) const = default; #else bool operator==( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); # else return ( opType == rhs.opType ) && ( argCount == rhs.argCount ) && ( argData == rhs.argData ); # endif } bool operator!=( BuildPartitionedAccelerationStructureIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: PartitionedAccelerationStructureOpTypeNV opType = PartitionedAccelerationStructureOpTypeNV::eWriteInstance; uint32_t argCount = {}; StridedDeviceAddressNV argData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BuildPartitionedAccelerationStructureIndirectCommandNV; }; #endif // wrapper struct for struct VkPartitionedAccelerationStructureInstancesInputNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureInstancesInputNV.html struct PartitionedAccelerationStructureInstancesInputNV { using NativeType = VkPartitionedAccelerationStructureInstancesInputNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( BuildAccelerationStructureFlagsKHR flags_ = {}, uint32_t instanceCount_ = {}, uint32_t maxInstancePerPartitionCount_ = {}, uint32_t partitionCount_ = {}, uint32_t maxInstanceInGlobalPartitionCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , instanceCount{ instanceCount_ } , maxInstancePerPartitionCount{ maxInstancePerPartitionCount_ } , partitionCount{ partitionCount_ } , maxInstanceInGlobalPartitionCount{ maxInstanceInGlobalPartitionCount_ } { } VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureInstancesInputNV( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PartitionedAccelerationStructureInstancesInputNV( 
VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT : PartitionedAccelerationStructureInstancesInputNV( *reinterpret_cast( &rhs ) ) { } PartitionedAccelerationStructureInstancesInputNV & operator=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PartitionedAccelerationStructureInstancesInputNV & operator=( VkPartitionedAccelerationStructureInstancesInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setFlags( BuildAccelerationStructureFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setInstanceCount( uint32_t instanceCount_ ) & VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setInstanceCount( uint32_t instanceCount_ ) && VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) & VULKAN_HPP_NOEXCEPT { maxInstancePerPartitionCount = maxInstancePerPartitionCount_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setMaxInstancePerPartitionCount( uint32_t maxInstancePerPartitionCount_ ) && VULKAN_HPP_NOEXCEPT { maxInstancePerPartitionCount = maxInstancePerPartitionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setPartitionCount( uint32_t partitionCount_ ) & VULKAN_HPP_NOEXCEPT { partitionCount = partitionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setPartitionCount( uint32_t partitionCount_ ) && VULKAN_HPP_NOEXCEPT { partitionCount = partitionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV & setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) & VULKAN_HPP_NOEXCEPT { maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureInstancesInputNV && setMaxInstanceInGlobalPartitionCount( uint32_t maxInstanceInGlobalPartitionCount_ ) && VULKAN_HPP_NOEXCEPT { maxInstanceInGlobalPartitionCount = maxInstanceInGlobalPartitionCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPartitionedAccelerationStructureInstancesInputNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureInstancesInputNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureInstancesInputNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureInstancesInputNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, instanceCount, maxInstancePerPartitionCount, partitionCount, maxInstanceInGlobalPartitionCount 
); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PartitionedAccelerationStructureInstancesInputNV const & ) const = default; #else bool operator==( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( instanceCount == rhs.instanceCount ) && ( maxInstancePerPartitionCount == rhs.maxInstancePerPartitionCount ) && ( partitionCount == rhs.partitionCount ) && ( maxInstanceInGlobalPartitionCount == rhs.maxInstanceInGlobalPartitionCount ); # endif } bool operator!=( PartitionedAccelerationStructureInstancesInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePartitionedAccelerationStructureInstancesInputNV; void * pNext = {}; BuildAccelerationStructureFlagsKHR flags = {}; uint32_t instanceCount = {}; uint32_t maxInstancePerPartitionCount = {}; uint32_t partitionCount = {}; uint32_t maxInstanceInGlobalPartitionCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PartitionedAccelerationStructureInstancesInputNV; }; #endif template <> struct CppType { using Type = PartitionedAccelerationStructureInstancesInputNV; }; // wrapper struct for struct VkBuildPartitionedAccelerationStructureInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkBuildPartitionedAccelerationStructureInfoNV.html struct BuildPartitionedAccelerationStructureInfoNV { using NativeType = VkBuildPartitionedAccelerationStructureInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBuildPartitionedAccelerationStructureInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
BuildPartitionedAccelerationStructureInfoNV( PartitionedAccelerationStructureInstancesInputNV input_ = {}, DeviceAddress srcAccelerationStructureData_ = {}, DeviceAddress dstAccelerationStructureData_ = {}, DeviceAddress scratchData_ = {}, DeviceAddress srcInfos_ = {}, DeviceAddress srcInfosCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , input{ input_ } , srcAccelerationStructureData{ srcAccelerationStructureData_ } , dstAccelerationStructureData{ dstAccelerationStructureData_ } , scratchData{ scratchData_ } , srcInfos{ srcInfos_ } , srcInfosCount{ srcInfosCount_ } { } VULKAN_HPP_CONSTEXPR BuildPartitionedAccelerationStructureInfoNV( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; BuildPartitionedAccelerationStructureInfoNV( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : BuildPartitionedAccelerationStructureInfoNV( *reinterpret_cast( &rhs ) ) { } BuildPartitionedAccelerationStructureInfoNV & operator=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ BuildPartitionedAccelerationStructureInfoNV & operator=( VkBuildPartitionedAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setInput( PartitionedAccelerationStructureInstancesInputNV const & input_ ) & VULKAN_HPP_NOEXCEPT { input = input_; return *this; } VULKAN_HPP_CONSTEXPR_14 
BuildPartitionedAccelerationStructureInfoNV && setInput( PartitionedAccelerationStructureInstancesInputNV const & input_ ) && VULKAN_HPP_NOEXCEPT { input = input_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcAccelerationStructureData( DeviceAddress srcAccelerationStructureData_ ) & VULKAN_HPP_NOEXCEPT { srcAccelerationStructureData = srcAccelerationStructureData_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setSrcAccelerationStructureData( DeviceAddress srcAccelerationStructureData_ ) && VULKAN_HPP_NOEXCEPT { srcAccelerationStructureData = srcAccelerationStructureData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setDstAccelerationStructureData( DeviceAddress dstAccelerationStructureData_ ) & VULKAN_HPP_NOEXCEPT { dstAccelerationStructureData = dstAccelerationStructureData_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setDstAccelerationStructureData( DeviceAddress dstAccelerationStructureData_ ) && VULKAN_HPP_NOEXCEPT { dstAccelerationStructureData = dstAccelerationStructureData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setScratchData( DeviceAddress scratchData_ ) & VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setScratchData( DeviceAddress scratchData_ ) && VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV & setSrcInfos( DeviceAddress srcInfos_ ) & VULKAN_HPP_NOEXCEPT { srcInfos = srcInfos_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setSrcInfos( DeviceAddress srcInfos_ ) && VULKAN_HPP_NOEXCEPT { srcInfos = srcInfos_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
BuildPartitionedAccelerationStructureInfoNV & setSrcInfosCount( DeviceAddress srcInfosCount_ ) & VULKAN_HPP_NOEXCEPT { srcInfosCount = srcInfosCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 BuildPartitionedAccelerationStructureInfoNV && setSrcInfosCount( DeviceAddress srcInfosCount_ ) && VULKAN_HPP_NOEXCEPT { srcInfosCount = srcInfosCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkBuildPartitionedAccelerationStructureInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkBuildPartitionedAccelerationStructureInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, input, srcAccelerationStructureData, dstAccelerationStructureData, scratchData, srcInfos, srcInfosCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( BuildPartitionedAccelerationStructureInfoNV const & ) const = default; #else bool operator==( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( input == rhs.input ) && ( srcAccelerationStructureData == rhs.srcAccelerationStructureData ) && ( dstAccelerationStructureData == rhs.dstAccelerationStructureData ) && ( scratchData == rhs.scratchData ) && ( srcInfos == rhs.srcInfos ) && ( srcInfosCount == rhs.srcInfosCount ); # endif } bool operator!=( BuildPartitionedAccelerationStructureInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eBuildPartitionedAccelerationStructureInfoNV; void * pNext = {}; PartitionedAccelerationStructureInstancesInputNV input = {}; DeviceAddress srcAccelerationStructureData = {}; DeviceAddress dstAccelerationStructureData = {}; DeviceAddress scratchData = {}; DeviceAddress srcInfos = {}; DeviceAddress srcInfosCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = BuildPartitionedAccelerationStructureInfoNV; }; #endif template <> struct CppType { using Type = BuildPartitionedAccelerationStructureInfoNV; }; // wrapper struct for struct VkCalibratedTimestampInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCalibratedTimestampInfoKHR.html struct CalibratedTimestampInfoKHR { using NativeType = VkCalibratedTimestampInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( TimeDomainKHR timeDomain_ = TimeDomainKHR::eDevice, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , timeDomain{ timeDomain_ } { } VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CalibratedTimestampInfoKHR( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CalibratedTimestampInfoKHR( *reinterpret_cast( &rhs ) ) { } CalibratedTimestampInfoKHR & operator=( CalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CalibratedTimestampInfoKHR & operator=( VkCalibratedTimestampInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setPNext( const void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR & setTimeDomain( TimeDomainKHR timeDomain_ ) & VULKAN_HPP_NOEXCEPT { timeDomain = timeDomain_; return *this; } VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoKHR && setTimeDomain( TimeDomainKHR timeDomain_ ) && VULKAN_HPP_NOEXCEPT { timeDomain = timeDomain_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCalibratedTimestampInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCalibratedTimestampInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCalibratedTimestampInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCalibratedTimestampInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, timeDomain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CalibratedTimestampInfoKHR const & ) const = default; #else bool operator==( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timeDomain == rhs.timeDomain ); # endif } bool operator!=( CalibratedTimestampInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCalibratedTimestampInfoKHR; const void * pNext = {}; TimeDomainKHR timeDomain = TimeDomainKHR::eDevice; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CalibratedTimestampInfoKHR; }; #endif template <> struct CppType { using Type = 
CalibratedTimestampInfoKHR; }; using CalibratedTimestampInfoEXT = CalibratedTimestampInfoKHR; // wrapper struct for struct VkCheckpointData2NV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointData2NV.html struct CheckpointData2NV { using NativeType = VkCheckpointData2NV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointData2NV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CheckpointData2NV( PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stage{ stage_ } , pCheckpointMarker{ pCheckpointMarker_ } { } VULKAN_HPP_CONSTEXPR CheckpointData2NV( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CheckpointData2NV( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointData2NV( *reinterpret_cast( &rhs ) ) {} CheckpointData2NV & operator=( CheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CheckpointData2NV & operator=( VkCheckpointData2NV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkCheckpointData2NV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCheckpointData2NV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCheckpointData2NV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCheckpointData2NV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stage, pCheckpointMarker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CheckpointData2NV const & ) const = default; #else bool operator==( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); # endif } bool operator!=( CheckpointData2NV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCheckpointData2NV; void * pNext = {}; PipelineStageFlags2 stage = {}; void * pCheckpointMarker = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CheckpointData2NV; }; #endif template <> struct CppType { using Type = CheckpointData2NV; }; // wrapper struct for struct VkCheckpointDataNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCheckpointDataNV.html struct CheckpointDataNV { using NativeType = VkCheckpointDataNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CheckpointDataNV( PipelineStageFlagBits stage_ = PipelineStageFlagBits::eTopOfPipe, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stage{ stage_ } , pCheckpointMarker{ pCheckpointMarker_ } { } VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : CheckpointDataNV( *reinterpret_cast( &rhs ) ) {} CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkCheckpointDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCheckpointDataNV &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCheckpointDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCheckpointDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stage, pCheckpointMarker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CheckpointDataNV const & ) const = default; #else bool operator==( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( pCheckpointMarker == rhs.pCheckpointMarker ); # endif } bool operator!=( CheckpointDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCheckpointDataNV; void * pNext = {}; PipelineStageFlagBits stage = PipelineStageFlagBits::eTopOfPipe; void * pCheckpointMarker = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CheckpointDataNV; }; #endif template <> struct CppType { using Type = CheckpointDataNV; }; union ClearColorValue { using NativeType = VkClearColorValue; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & float32_ = {} ) : float32( float32_ ) {} VULKAN_HPP_CONSTEXPR ClearColorValue( float float32_0, float float32_1, float float32_2, float float32_3 ) : float32{ { { float32_0, float32_1, float32_2, float32_3 } } } { } VULKAN_HPP_CONSTEXPR_14 ClearColorValue( const std::array & int32_ ) : int32( int32_ ) {} VULKAN_HPP_CONSTEXPR ClearColorValue( int32_t int32_0, int32_t int32_1, int32_t int32_2, int32_t int32_3 ) : int32{ { { int32_0, int32_1, int32_2, int32_3 } } } { } VULKAN_HPP_CONSTEXPR_14 
ClearColorValue( const std::array & uint32_ ) : uint32( uint32_ ) {} VULKAN_HPP_CONSTEXPR ClearColorValue( uint32_t uint32_0, uint32_t uint32_1, uint32_t uint32_2, uint32_t uint32_3 ) : uint32{ { { uint32_0, uint32_1, uint32_2, uint32_3 } } } { } #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setFloat32( std::array float32_ ) & VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearColorValue && setFloat32( std::array float32_ ) && VULKAN_HPP_NOEXCEPT { float32 = float32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setInt32( std::array int32_ ) & VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearColorValue && setInt32( std::array int32_ ) && VULKAN_HPP_NOEXCEPT { int32 = int32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearColorValue & setUint32( std::array uint32_ ) & VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearColorValue && setUint32( std::array uint32_ ) && VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClearColorValue const &() const { return *reinterpret_cast( this ); } operator VkClearColorValue &() { return *reinterpret_cast( this ); } ArrayWrapper1D float32; ArrayWrapper1D int32; ArrayWrapper1D uint32; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClearColorValue; }; #endif // wrapper struct for struct VkClearDepthStencilValue, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearDepthStencilValue.html struct ClearDepthStencilValue { using NativeType = VkClearDepthStencilValue; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT : depth{ 
depth_ } , stencil{ stencil_ } { } VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT : ClearDepthStencilValue( *reinterpret_cast( &rhs ) ) { } ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setDepth( float depth_ ) & VULKAN_HPP_NOEXCEPT { depth = depth_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue && setDepth( float depth_ ) && VULKAN_HPP_NOEXCEPT { depth = depth_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & setStencil( uint32_t stencil_ ) & VULKAN_HPP_NOEXCEPT { stencil = stencil_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue && setStencil( uint32_t stencil_ ) && VULKAN_HPP_NOEXCEPT { stencil = stencil_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClearDepthStencilValue const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearDepthStencilValue const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClearDepthStencilValue *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( depth, stencil ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClearDepthStencilValue const & ) const = default; #else bool operator==( ClearDepthStencilValue const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( depth == rhs.depth ) && ( stencil == rhs.stencil ); # endif } bool operator!=( ClearDepthStencilValue const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float depth = {}; uint32_t stencil = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClearDepthStencilValue; }; #endif union ClearValue { using NativeType = VkClearValue; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClearValue( ClearColorValue color_ = {} ) : color( color_ ) {} VULKAN_HPP_CONSTEXPR_14 ClearValue( ClearDepthStencilValue depthStencil_ ) : depthStencil( depthStencil_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClearValue & setColor( ClearColorValue const & color_ ) & VULKAN_HPP_NOEXCEPT { color = color_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearValue && setColor( ClearColorValue const & color_ ) && VULKAN_HPP_NOEXCEPT { color = color_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearValue & setDepthStencil( ClearDepthStencilValue const & depthStencil_ ) & VULKAN_HPP_NOEXCEPT { depthStencil = depthStencil_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearValue && setDepthStencil( ClearDepthStencilValue const & depthStencil_ ) && VULKAN_HPP_NOEXCEPT { depthStencil = depthStencil_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClearValue const &() const { return *reinterpret_cast( this ); } operator VkClearValue &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ClearColorValue color; ClearDepthStencilValue depthStencil; #else VkClearColorValue color; VkClearDepthStencilValue depthStencil; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
<> struct CppType { using Type = ClearValue; }; #endif // wrapper struct for struct VkClearAttachment, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearAttachment.html struct ClearAttachment { using NativeType = VkClearAttachment; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask{ aspectMask_ } , colorAttachment{ colorAttachment_ } , clearValue{ clearValue_ } { } VULKAN_HPP_CONSTEXPR_14 ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT : ClearAttachment( *reinterpret_cast( &rhs ) ) {} ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearAttachment && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) & VULKAN_HPP_NOEXCEPT { colorAttachment = colorAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearAttachment && setColorAttachment( uint32_t colorAttachment_ ) && VULKAN_HPP_NOEXCEPT { colorAttachment = colorAttachment_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearAttachment & setClearValue( ClearValue const & clearValue_ ) & VULKAN_HPP_NOEXCEPT { clearValue = clearValue_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ClearAttachment && setClearValue( ClearValue const & clearValue_ ) && VULKAN_HPP_NOEXCEPT { clearValue = clearValue_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClearAttachment const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearAttachment const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClearAttachment *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( aspectMask, colorAttachment, clearValue ); } #endif public: ImageAspectFlags aspectMask = {}; uint32_t colorAttachment = {}; ClearValue clearValue = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClearAttachment; }; #endif // wrapper struct for struct VkClearRect, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkClearRect.html struct ClearRect { using NativeType = VkClearRect; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClearRect( Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : rect{ rect_ } , baseArrayLayer{ baseArrayLayer_ } , layerCount{ layerCount_ } { } VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT : ClearRect( *reinterpret_cast( &rhs ) ) {} ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClearRect & setRect( Rect2D const & rect_ ) & 
VULKAN_HPP_NOEXCEPT { rect = rect_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearRect && setRect( Rect2D const & rect_ ) && VULKAN_HPP_NOEXCEPT { rect = rect_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) & VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearRect && setBaseArrayLayer( uint32_t baseArrayLayer_ ) && VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClearRect & setLayerCount( uint32_t layerCount_ ) & VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClearRect && setLayerCount( uint32_t layerCount_ ) && VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClearRect const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearRect &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClearRect const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClearRect *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( rect, baseArrayLayer, layerCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClearRect const & ) const = default; #else bool operator==( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( rect == rhs.rect ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); # endif } bool operator!=( ClearRect const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Rect2D rect = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
ClearRect; }; #endif // wrapper struct for struct VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV.html struct ClusterAccelerationStructureBuildClustersBottomLevelInfoNV { using NativeType = VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( uint32_t clusterReferencesCount_ = {}, uint32_t clusterReferencesStride_ = {}, DeviceAddress clusterReferences_ = {} ) VULKAN_HPP_NOEXCEPT : clusterReferencesCount{ clusterReferencesCount_ } , clusterReferencesStride{ clusterReferencesStride_ } , clusterReferences{ clusterReferences_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureBuildClustersBottomLevelInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & operator=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & operator=( VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferencesCount( uint32_t clusterReferencesCount_ ) & VULKAN_HPP_NOEXCEPT { clusterReferencesCount 
= clusterReferencesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV && setClusterReferencesCount( uint32_t clusterReferencesCount_ ) && VULKAN_HPP_NOEXCEPT { clusterReferencesCount = clusterReferencesCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferencesStride( uint32_t clusterReferencesStride_ ) & VULKAN_HPP_NOEXCEPT { clusterReferencesStride = clusterReferencesStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV && setClusterReferencesStride( uint32_t clusterReferencesStride_ ) && VULKAN_HPP_NOEXCEPT { clusterReferencesStride = clusterReferencesStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV & setClusterReferences( DeviceAddress clusterReferences_ ) & VULKAN_HPP_NOEXCEPT { clusterReferences = clusterReferences_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildClustersBottomLevelInfoNV && setClusterReferences( DeviceAddress clusterReferences_ ) && VULKAN_HPP_NOEXCEPT { clusterReferences = clusterReferences_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildClustersBottomLevelInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( clusterReferencesCount, clusterReferencesStride, 
clusterReferences ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( clusterReferencesCount == rhs.clusterReferencesCount ) && ( clusterReferencesStride == rhs.clusterReferencesStride ) && ( clusterReferences == rhs.clusterReferences ); # endif } bool operator!=( ClusterAccelerationStructureBuildClustersBottomLevelInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t clusterReferencesCount = {}; uint32_t clusterReferencesStride = {}; DeviceAddress clusterReferences = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureBuildClustersBottomLevelInfoNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV.html struct ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV { using NativeType = VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( uint32_t geometryIndex_ = {}, uint32_t reserved_ = {}, uint32_t geometryFlags_ = {} ) VULKAN_HPP_NOEXCEPT : geometryIndex{ geometryIndex_ } , reserved{ reserved_ } , geometryFlags{ geometryFlags_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & operator=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & operator=( VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryIndex( uint32_t geometryIndex_ ) & VULKAN_HPP_NOEXCEPT { geometryIndex = geometryIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV && setGeometryIndex( uint32_t geometryIndex_ ) && VULKAN_HPP_NOEXCEPT { geometryIndex = geometryIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setReserved( uint32_t reserved_ ) & VULKAN_HPP_NOEXCEPT { reserved = reserved_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV && setReserved( uint32_t reserved_ ) && VULKAN_HPP_NOEXCEPT { reserved = reserved_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV & setGeometryFlags( uint32_t geometryFlags_ ) & VULKAN_HPP_NOEXCEPT { geometryFlags = geometryFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV && setGeometryFlags( uint32_t geometryFlags_ ) && VULKAN_HPP_NOEXCEPT { geometryFlags = geometryFlags_; return std::move( *this ); } #endif 
/*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( geometryIndex, reserved, geometryFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( geometryIndex == rhs.geometryIndex ) && ( reserved == rhs.reserved ) && ( geometryFlags == rhs.geometryFlags ); # endif } bool operator!=( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t geometryIndex : 24; uint32_t reserved : 5; uint32_t geometryFlags : 3; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterInfoNV.html struct ClusterAccelerationStructureBuildTriangleClusterInfoNV { using NativeType = VkClusterAccelerationStructureBuildTriangleClusterInfoNV; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( uint32_t clusterID_ = {}, ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, uint32_t triangleCount_ = {}, uint32_t vertexCount_ = {}, uint32_t positionTruncateBitCount_ = {}, uint32_t indexType_ = {}, uint32_t opacityMicromapIndexType_ = {}, ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, uint16_t indexBufferStride_ = {}, uint16_t vertexBufferStride_ = {}, uint16_t geometryIndexAndFlagsBufferStride_ = {}, uint16_t opacityMicromapIndexBufferStride_ = {}, DeviceAddress indexBuffer_ = {}, DeviceAddress vertexBuffer_ = {}, DeviceAddress geometryIndexAndFlagsBuffer_ = {}, DeviceAddress opacityMicromapArray_ = {}, DeviceAddress opacityMicromapIndexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT : clusterID{ clusterID_ } , clusterFlags{ clusterFlags_ } , triangleCount{ triangleCount_ } , vertexCount{ vertexCount_ } , positionTruncateBitCount{ positionTruncateBitCount_ } , indexType{ indexType_ } , opacityMicromapIndexType{ opacityMicromapIndexType_ } , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ } , indexBufferStride{ indexBufferStride_ } , vertexBufferStride{ vertexBufferStride_ } , geometryIndexAndFlagsBufferStride{ geometryIndexAndFlagsBufferStride_ } , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } , indexBuffer{ indexBuffer_ } , vertexBuffer{ vertexBuffer_ } , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } , opacityMicromapArray{ opacityMicromapArray_ } , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureBuildTriangleClusterInfoNV( VkClusterAccelerationStructureBuildTriangleClusterInfoNV 
const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureBuildTriangleClusterInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureBuildTriangleClusterInfoNV & operator=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureBuildTriangleClusterInfoNV & operator=( VkClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterID( uint32_t clusterID_ ) & VULKAN_HPP_NOEXCEPT { clusterID = clusterID_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setClusterID( uint32_t clusterID_ ) && VULKAN_HPP_NOEXCEPT { clusterID = clusterID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) & VULKAN_HPP_NOEXCEPT { clusterFlags = clusterFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) && VULKAN_HPP_NOEXCEPT { clusterFlags = clusterFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setTriangleCount( uint32_t triangleCount_ ) & VULKAN_HPP_NOEXCEPT { triangleCount = triangleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setTriangleCount( uint32_t triangleCount_ ) && VULKAN_HPP_NOEXCEPT { triangleCount = triangleCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexCount( uint32_t vertexCount_ ) 
& VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setVertexCount( uint32_t vertexCount_ ) && VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) & VULKAN_HPP_NOEXCEPT { positionTruncateBitCount = positionTruncateBitCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) && VULKAN_HPP_NOEXCEPT { positionTruncateBitCount = positionTruncateBitCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexType( uint32_t indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setIndexType( uint32_t indexType_ ) && VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapIndexType = opacityMicromapIndexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexType = opacityMicromapIndexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setBaseGeometryIndexAndGeometryFlags( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) & VULKAN_HPP_NOEXCEPT { baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setBaseGeometryIndexAndGeometryFlags( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) && VULKAN_HPP_NOEXCEPT { baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { indexBufferStride = indexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setIndexBufferStride( uint16_t indexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { indexBufferStride = indexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { vertexBufferStride = vertexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setVertexBufferStride( uint16_t vertexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { vertexBufferStride = vertexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) & VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) && VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { 
opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setIndexBuffer( DeviceAddress indexBuffer_ ) & VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setIndexBuffer( DeviceAddress indexBuffer_ ) && VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setVertexBuffer( DeviceAddress vertexBuffer_ ) & VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setVertexBuffer( DeviceAddress vertexBuffer_ ) && VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) & VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) && VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapArray = opacityMicromapArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureBuildTriangleClusterInfoNV && setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapArray = opacityMicromapArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV & setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterInfoNV && setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( clusterID, clusterFlags, triangleCount, vertexCount, positionTruncateBitCount, indexType, opacityMicromapIndexType, baseGeometryIndexAndGeometryFlags, indexBufferStride, vertexBufferStride, geometryIndexAndFlagsBufferStride, opacityMicromapIndexBufferStride, indexBuffer, vertexBuffer, geometryIndexAndFlagsBuffer, opacityMicromapArray, opacityMicromapIndexBuffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & ) const = default; #else bool operator==( 
ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) && ( vertexCount == rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) && ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) && ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ); # endif } bool operator!=( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t clusterID = {}; ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; uint32_t triangleCount : 9; uint32_t vertexCount : 9; uint32_t positionTruncateBitCount : 6; uint32_t indexType : 4; uint32_t opacityMicromapIndexType : 4; ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; uint16_t indexBufferStride = {}; uint16_t vertexBufferStride = {}; uint16_t geometryIndexAndFlagsBufferStride = {}; uint16_t opacityMicromapIndexBufferStride = {}; DeviceAddress indexBuffer = {}; DeviceAddress vertexBuffer = {}; DeviceAddress geometryIndexAndFlagsBuffer = {}; DeviceAddress opacityMicromapArray = {}; DeviceAddress opacityMicromapIndexBuffer = {}; 
}; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureBuildTriangleClusterInfoNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV.html struct ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV { using NativeType = VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( uint32_t clusterID_ = {}, ClusterAccelerationStructureClusterFlagsNV clusterFlags_ = {}, uint32_t triangleCount_ = {}, uint32_t vertexCount_ = {}, uint32_t positionTruncateBitCount_ = {}, uint32_t indexType_ = {}, uint32_t opacityMicromapIndexType_ = {}, ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags_ = {}, uint16_t indexBufferStride_ = {}, uint16_t vertexBufferStride_ = {}, uint16_t geometryIndexAndFlagsBufferStride_ = {}, uint16_t opacityMicromapIndexBufferStride_ = {}, DeviceAddress indexBuffer_ = {}, DeviceAddress vertexBuffer_ = {}, DeviceAddress geometryIndexAndFlagsBuffer_ = {}, DeviceAddress opacityMicromapArray_ = {}, DeviceAddress opacityMicromapIndexBuffer_ = {}, DeviceAddress instantiationBoundingBoxLimit_ = {} ) VULKAN_HPP_NOEXCEPT : clusterID{ clusterID_ } , clusterFlags{ clusterFlags_ } , triangleCount{ triangleCount_ } , vertexCount{ vertexCount_ } , positionTruncateBitCount{ positionTruncateBitCount_ } , indexType{ indexType_ } , opacityMicromapIndexType{ opacityMicromapIndexType_ } , baseGeometryIndexAndGeometryFlags{ baseGeometryIndexAndGeometryFlags_ } , indexBufferStride{ indexBufferStride_ } , vertexBufferStride{ vertexBufferStride_ } , geometryIndexAndFlagsBufferStride{ 
geometryIndexAndFlagsBufferStride_ } , opacityMicromapIndexBufferStride{ opacityMicromapIndexBufferStride_ } , indexBuffer{ indexBuffer_ } , vertexBuffer{ vertexBuffer_ } , geometryIndexAndFlagsBuffer{ geometryIndexAndFlagsBuffer_ } , opacityMicromapArray{ opacityMicromapArray_ } , opacityMicromapIndexBuffer{ opacityMicromapIndexBuffer_ } , instantiationBoundingBoxLimit{ instantiationBoundingBoxLimit_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( *reinterpret_cast( &rhs ) ) { } explicit ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV( ClusterAccelerationStructureBuildTriangleClusterInfoNV const & clusterAccelerationStructureBuildTriangleClusterInfoNV, DeviceAddress instantiationBoundingBoxLimit_ = {} ) : clusterID( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterID ) , clusterFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.clusterFlags ) , triangleCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.triangleCount ) , vertexCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexCount ) , positionTruncateBitCount( clusterAccelerationStructureBuildTriangleClusterInfoNV.positionTruncateBitCount ) , indexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexType ) , opacityMicromapIndexType( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexType ) , baseGeometryIndexAndGeometryFlags( clusterAccelerationStructureBuildTriangleClusterInfoNV.baseGeometryIndexAndGeometryFlags ) , indexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBufferStride ) , vertexBufferStride( 
clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBufferStride ) , geometryIndexAndFlagsBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBufferStride ) , opacityMicromapIndexBufferStride( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBufferStride ) , indexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.indexBuffer ) , vertexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.vertexBuffer ) , geometryIndexAndFlagsBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.geometryIndexAndFlagsBuffer ) , opacityMicromapArray( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapArray ) , opacityMicromapIndexBuffer( clusterAccelerationStructureBuildTriangleClusterInfoNV.opacityMicromapIndexBuffer ) , instantiationBoundingBoxLimit( instantiationBoundingBoxLimit_ ) { } ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & operator=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & operator=( VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterID( uint32_t clusterID_ ) & VULKAN_HPP_NOEXCEPT { clusterID = clusterID_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setClusterID( uint32_t clusterID_ ) && VULKAN_HPP_NOEXCEPT { clusterID = clusterID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) & 
VULKAN_HPP_NOEXCEPT { clusterFlags = clusterFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setClusterFlags( ClusterAccelerationStructureClusterFlagsNV clusterFlags_ ) && VULKAN_HPP_NOEXCEPT { clusterFlags = clusterFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setTriangleCount( uint32_t triangleCount_ ) & VULKAN_HPP_NOEXCEPT { triangleCount = triangleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setTriangleCount( uint32_t triangleCount_ ) && VULKAN_HPP_NOEXCEPT { triangleCount = triangleCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexCount( uint32_t vertexCount_ ) & VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setVertexCount( uint32_t vertexCount_ ) && VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) & VULKAN_HPP_NOEXCEPT { positionTruncateBitCount = positionTruncateBitCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setPositionTruncateBitCount( uint32_t positionTruncateBitCount_ ) && VULKAN_HPP_NOEXCEPT { positionTruncateBitCount = positionTruncateBitCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexType( uint32_t indexType_ ) & VULKAN_HPP_NOEXCEPT { indexType = indexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setIndexType( uint32_t indexType_ ) && 
VULKAN_HPP_NOEXCEPT { indexType = indexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapIndexType = opacityMicromapIndexType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setOpacityMicromapIndexType( uint32_t opacityMicromapIndexType_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexType = opacityMicromapIndexType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setBaseGeometryIndexAndGeometryFlags( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) & VULKAN_HPP_NOEXCEPT { baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setBaseGeometryIndexAndGeometryFlags( ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV const & baseGeometryIndexAndGeometryFlags_ ) && VULKAN_HPP_NOEXCEPT { baseGeometryIndexAndGeometryFlags = baseGeometryIndexAndGeometryFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexBufferStride( uint16_t indexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { indexBufferStride = indexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setIndexBufferStride( uint16_t indexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { indexBufferStride = indexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexBufferStride( uint16_t vertexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { vertexBufferStride = vertexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setVertexBufferStride( uint16_t vertexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { vertexBufferStride = vertexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) & VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setGeometryIndexAndFlagsBufferStride( uint16_t geometryIndexAndFlagsBufferStride_ ) && VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBufferStride = geometryIndexAndFlagsBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setOpacityMicromapIndexBufferStride( uint16_t opacityMicromapIndexBufferStride_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBufferStride = opacityMicromapIndexBufferStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setIndexBuffer( DeviceAddress indexBuffer_ ) & VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setIndexBuffer( DeviceAddress indexBuffer_ ) && VULKAN_HPP_NOEXCEPT { indexBuffer = indexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setVertexBuffer( DeviceAddress vertexBuffer_ ) & VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setVertexBuffer( DeviceAddress vertexBuffer_ ) && VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) & VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setGeometryIndexAndFlagsBuffer( DeviceAddress geometryIndexAndFlagsBuffer_ ) && VULKAN_HPP_NOEXCEPT { geometryIndexAndFlagsBuffer = geometryIndexAndFlagsBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapArray = opacityMicromapArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setOpacityMicromapArray( DeviceAddress opacityMicromapArray_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapArray = opacityMicromapArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) & VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setOpacityMicromapIndexBuffer( DeviceAddress opacityMicromapIndexBuffer_ ) && VULKAN_HPP_NOEXCEPT { opacityMicromapIndexBuffer = opacityMicromapIndexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV & setInstantiationBoundingBoxLimit( DeviceAddress instantiationBoundingBoxLimit_ ) & VULKAN_HPP_NOEXCEPT 
{ instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV && setInstantiationBoundingBoxLimit( DeviceAddress instantiationBoundingBoxLimit_ ) && VULKAN_HPP_NOEXCEPT { instantiationBoundingBoxLimit = instantiationBoundingBoxLimit_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( clusterID, clusterFlags, triangleCount, vertexCount, positionTruncateBitCount, indexType, opacityMicromapIndexType, baseGeometryIndexAndGeometryFlags, indexBufferStride, vertexBufferStride, geometryIndexAndFlagsBufferStride, opacityMicromapIndexBufferStride, indexBuffer, vertexBuffer, geometryIndexAndFlagsBuffer, opacityMicromapArray, opacityMicromapIndexBuffer, instantiationBoundingBoxLimit ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( clusterID == rhs.clusterID ) && ( clusterFlags == rhs.clusterFlags ) && ( triangleCount == rhs.triangleCount ) && ( vertexCount == 
rhs.vertexCount ) && ( positionTruncateBitCount == rhs.positionTruncateBitCount ) && ( indexType == rhs.indexType ) && ( opacityMicromapIndexType == rhs.opacityMicromapIndexType ) && ( baseGeometryIndexAndGeometryFlags == rhs.baseGeometryIndexAndGeometryFlags ) && ( indexBufferStride == rhs.indexBufferStride ) && ( vertexBufferStride == rhs.vertexBufferStride ) && ( geometryIndexAndFlagsBufferStride == rhs.geometryIndexAndFlagsBufferStride ) && ( opacityMicromapIndexBufferStride == rhs.opacityMicromapIndexBufferStride ) && ( indexBuffer == rhs.indexBuffer ) && ( vertexBuffer == rhs.vertexBuffer ) && ( geometryIndexAndFlagsBuffer == rhs.geometryIndexAndFlagsBuffer ) && ( opacityMicromapArray == rhs.opacityMicromapArray ) && ( opacityMicromapIndexBuffer == rhs.opacityMicromapIndexBuffer ) && ( instantiationBoundingBoxLimit == rhs.instantiationBoundingBoxLimit ); # endif } bool operator!=( ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t clusterID = {}; ClusterAccelerationStructureClusterFlagsNV clusterFlags = {}; uint32_t triangleCount : 9; uint32_t vertexCount : 9; uint32_t positionTruncateBitCount : 6; uint32_t indexType : 4; uint32_t opacityMicromapIndexType : 4; ClusterAccelerationStructureGeometryIndexAndGeometryFlagsNV baseGeometryIndexAndGeometryFlags = {}; uint16_t indexBufferStride = {}; uint16_t vertexBufferStride = {}; uint16_t geometryIndexAndFlagsBufferStride = {}; uint16_t opacityMicromapIndexBufferStride = {}; DeviceAddress indexBuffer = {}; DeviceAddress vertexBuffer = {}; DeviceAddress geometryIndexAndFlagsBuffer = {}; DeviceAddress opacityMicromapArray = {}; DeviceAddress opacityMicromapIndexBuffer = {}; DeviceAddress instantiationBoundingBoxLimit = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureBuildTriangleClusterTemplateInfoNV; }; #endif // wrapper struct for struct 
VkClusterAccelerationStructureClustersBottomLevelInputNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureClustersBottomLevelInputNV.html struct ClusterAccelerationStructureClustersBottomLevelInputNV { using NativeType = VkClusterAccelerationStructureClustersBottomLevelInputNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( uint32_t maxTotalClusterCount_ = {}, uint32_t maxClusterCountPerAccelerationStructure_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxTotalClusterCount{ maxTotalClusterCount_ } , maxClusterCountPerAccelerationStructure{ maxClusterCountPerAccelerationStructure_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureClustersBottomLevelInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureClustersBottomLevelInputNV( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureClustersBottomLevelInputNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureClustersBottomLevelInputNV & operator=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureClustersBottomLevelInputNV & operator=( VkClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setPNext( void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) & VULKAN_HPP_NOEXCEPT { maxTotalClusterCount = maxTotalClusterCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV && setMaxTotalClusterCount( uint32_t maxTotalClusterCount_ ) && VULKAN_HPP_NOEXCEPT { maxTotalClusterCount = maxTotalClusterCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV & setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureClustersBottomLevelInputNV && setMaxClusterCountPerAccelerationStructure( uint32_t maxClusterCountPerAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { maxClusterCountPerAccelerationStructure = maxClusterCountPerAccelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureClustersBottomLevelInputNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureClustersBottomLevelInputNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureClustersBottomLevelInputNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureClustersBottomLevelInputNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
maxTotalClusterCount, maxClusterCountPerAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureClustersBottomLevelInputNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTotalClusterCount == rhs.maxTotalClusterCount ) && ( maxClusterCountPerAccelerationStructure == rhs.maxClusterCountPerAccelerationStructure ); # endif } bool operator!=( ClusterAccelerationStructureClustersBottomLevelInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eClusterAccelerationStructureClustersBottomLevelInputNV; void * pNext = {}; uint32_t maxTotalClusterCount = {}; uint32_t maxClusterCountPerAccelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureClustersBottomLevelInputNV; }; #endif template <> struct CppType { using Type = ClusterAccelerationStructureClustersBottomLevelInputNV; }; // wrapper struct for struct VkClusterAccelerationStructureTriangleClusterInputNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureTriangleClusterInputNV.html struct ClusterAccelerationStructureTriangleClusterInputNV { using NativeType = VkClusterAccelerationStructureTriangleClusterInputNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureTriangleClusterInputNV( Format vertexFormat_ = Format::eUndefined, uint32_t 
maxGeometryIndexValue_ = {}, uint32_t maxClusterUniqueGeometryCount_ = {}, uint32_t maxClusterTriangleCount_ = {}, uint32_t maxClusterVertexCount_ = {}, uint32_t maxTotalTriangleCount_ = {}, uint32_t maxTotalVertexCount_ = {}, uint32_t minPositionTruncateBitCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexFormat{ vertexFormat_ } , maxGeometryIndexValue{ maxGeometryIndexValue_ } , maxClusterUniqueGeometryCount{ maxClusterUniqueGeometryCount_ } , maxClusterTriangleCount{ maxClusterTriangleCount_ } , maxClusterVertexCount{ maxClusterVertexCount_ } , maxTotalTriangleCount{ maxTotalTriangleCount_ } , maxTotalVertexCount{ maxTotalVertexCount_ } , minPositionTruncateBitCount{ minPositionTruncateBitCount_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureTriangleClusterInputNV( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureTriangleClusterInputNV( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureTriangleClusterInputNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureTriangleClusterInputNV & operator=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureTriangleClusterInputNV & operator=( VkClusterAccelerationStructureTriangleClusterInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureTriangleClusterInputNV & setVertexFormat( Format vertexFormat_ ) & VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setVertexFormat( Format vertexFormat_ ) && VULKAN_HPP_NOEXCEPT { vertexFormat = vertexFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) & VULKAN_HPP_NOEXCEPT { maxGeometryIndexValue = maxGeometryIndexValue_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxGeometryIndexValue( uint32_t maxGeometryIndexValue_ ) && VULKAN_HPP_NOEXCEPT { maxGeometryIndexValue = maxGeometryIndexValue_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) & VULKAN_HPP_NOEXCEPT { maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxClusterUniqueGeometryCount( uint32_t maxClusterUniqueGeometryCount_ ) && VULKAN_HPP_NOEXCEPT { maxClusterUniqueGeometryCount = maxClusterUniqueGeometryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) & VULKAN_HPP_NOEXCEPT { maxClusterTriangleCount = maxClusterTriangleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxClusterTriangleCount( uint32_t maxClusterTriangleCount_ ) && VULKAN_HPP_NOEXCEPT { maxClusterTriangleCount = maxClusterTriangleCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) & VULKAN_HPP_NOEXCEPT 
{ maxClusterVertexCount = maxClusterVertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxClusterVertexCount( uint32_t maxClusterVertexCount_ ) && VULKAN_HPP_NOEXCEPT { maxClusterVertexCount = maxClusterVertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) & VULKAN_HPP_NOEXCEPT { maxTotalTriangleCount = maxTotalTriangleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxTotalTriangleCount( uint32_t maxTotalTriangleCount_ ) && VULKAN_HPP_NOEXCEPT { maxTotalTriangleCount = maxTotalTriangleCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) & VULKAN_HPP_NOEXCEPT { maxTotalVertexCount = maxTotalVertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMaxTotalVertexCount( uint32_t maxTotalVertexCount_ ) && VULKAN_HPP_NOEXCEPT { maxTotalVertexCount = maxTotalVertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV & setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) & VULKAN_HPP_NOEXCEPT { minPositionTruncateBitCount = minPositionTruncateBitCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureTriangleClusterInputNV && setMinPositionTruncateBitCount( uint32_t minPositionTruncateBitCount_ ) && VULKAN_HPP_NOEXCEPT { minPositionTruncateBitCount = minPositionTruncateBitCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureTriangleClusterInputNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureTriangleClusterInputNV &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkClusterAccelerationStructureTriangleClusterInputNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureTriangleClusterInputNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vertexFormat, maxGeometryIndexValue, maxClusterUniqueGeometryCount, maxClusterTriangleCount, maxClusterVertexCount, maxTotalTriangleCount, maxTotalVertexCount, minPositionTruncateBitCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureTriangleClusterInputNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexFormat == rhs.vertexFormat ) && ( maxGeometryIndexValue == rhs.maxGeometryIndexValue ) && ( maxClusterUniqueGeometryCount == rhs.maxClusterUniqueGeometryCount ) && ( maxClusterTriangleCount == rhs.maxClusterTriangleCount ) && ( maxClusterVertexCount == rhs.maxClusterVertexCount ) && ( maxTotalTriangleCount == rhs.maxTotalTriangleCount ) && ( maxTotalVertexCount == rhs.maxTotalVertexCount ) && ( minPositionTruncateBitCount == rhs.minPositionTruncateBitCount ); # endif } bool operator!=( ClusterAccelerationStructureTriangleClusterInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eClusterAccelerationStructureTriangleClusterInputNV; void * pNext = {}; Format vertexFormat = Format::eUndefined; uint32_t maxGeometryIndexValue = {}; uint32_t maxClusterUniqueGeometryCount = {}; uint32_t maxClusterTriangleCount = {}; uint32_t maxClusterVertexCount = {}; uint32_t maxTotalTriangleCount = {}; 
uint32_t maxTotalVertexCount = {}; uint32_t minPositionTruncateBitCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureTriangleClusterInputNV; }; #endif template <> struct CppType { using Type = ClusterAccelerationStructureTriangleClusterInputNV; }; // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInputNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInputNV.html struct ClusterAccelerationStructureMoveObjectsInputNV { using NativeType = VkClusterAccelerationStructureMoveObjectsInputNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureTypeNV type_ = ClusterAccelerationStructureTypeNV::eClustersBottomLevel, Bool32 noMoveOverlap_ = {}, DeviceSize maxMovedBytes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , noMoveOverlap{ noMoveOverlap_ } , maxMovedBytes{ maxMovedBytes_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInputNV( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureMoveObjectsInputNV( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureMoveObjectsInputNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureMoveObjectsInputNV & operator=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureMoveObjectsInputNV & operator=( VkClusterAccelerationStructureMoveObjectsInputNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this 
= *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setType( ClusterAccelerationStructureTypeNV type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV && setType( ClusterAccelerationStructureTypeNV type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setNoMoveOverlap( Bool32 noMoveOverlap_ ) & VULKAN_HPP_NOEXCEPT { noMoveOverlap = noMoveOverlap_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV && setNoMoveOverlap( Bool32 noMoveOverlap_ ) && VULKAN_HPP_NOEXCEPT { noMoveOverlap = noMoveOverlap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV & setMaxMovedBytes( DeviceSize maxMovedBytes_ ) & VULKAN_HPP_NOEXCEPT { maxMovedBytes = maxMovedBytes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInputNV && setMaxMovedBytes( DeviceSize maxMovedBytes_ ) && VULKAN_HPP_NOEXCEPT { maxMovedBytes = maxMovedBytes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureMoveObjectsInputNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInputNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInputNV const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInputNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, noMoveOverlap, maxMovedBytes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureMoveObjectsInputNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( noMoveOverlap == rhs.noMoveOverlap ) && ( maxMovedBytes == rhs.maxMovedBytes ); # endif } bool operator!=( ClusterAccelerationStructureMoveObjectsInputNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eClusterAccelerationStructureMoveObjectsInputNV; void * pNext = {}; ClusterAccelerationStructureTypeNV type = ClusterAccelerationStructureTypeNV::eClustersBottomLevel; Bool32 noMoveOverlap = {}; DeviceSize maxMovedBytes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureMoveObjectsInputNV; }; #endif template <> struct CppType { using Type = ClusterAccelerationStructureMoveObjectsInputNV; }; union ClusterAccelerationStructureOpInputNV { using NativeType = VkClusterAccelerationStructureOpInputNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ = {} ) : pClustersBottomLevel( pClustersBottomLevel_ ) { } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureTriangleClusterInputNV * 
pTriangleClusters_ ) : pTriangleClusters( pTriangleClusters_ ) { } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV( ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) : pMoveObjects( pMoveObjects_ ) { } #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPClustersBottomLevel( ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) & VULKAN_HPP_NOEXCEPT { pClustersBottomLevel = pClustersBottomLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV && setPClustersBottomLevel( ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel_ ) && VULKAN_HPP_NOEXCEPT { pClustersBottomLevel = pClustersBottomLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPTriangleClusters( ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) & VULKAN_HPP_NOEXCEPT { pTriangleClusters = pTriangleClusters_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV && setPTriangleClusters( ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters_ ) && VULKAN_HPP_NOEXCEPT { pTriangleClusters = pTriangleClusters_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV & setPMoveObjects( ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) & VULKAN_HPP_NOEXCEPT { pMoveObjects = pMoveObjects_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureOpInputNV && setPMoveObjects( ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects_ ) && VULKAN_HPP_NOEXCEPT { pMoveObjects = pMoveObjects_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureOpInputNV const &() const { return *reinterpret_cast( this ); } operator 
VkClusterAccelerationStructureOpInputNV &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS ClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; ClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; ClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; #else VkClusterAccelerationStructureClustersBottomLevelInputNV * pClustersBottomLevel; VkClusterAccelerationStructureTriangleClusterInputNV * pTriangleClusters; VkClusterAccelerationStructureMoveObjectsInputNV * pMoveObjects; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureOpInputNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureInputInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInputInfoNV.html struct ClusterAccelerationStructureInputInfoNV { using NativeType = VkClusterAccelerationStructureInputInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureInputInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( uint32_t maxAccelerationStructureCount_ = {}, BuildAccelerationStructureFlagsKHR flags_ = {}, ClusterAccelerationStructureOpTypeNV opType_ = ClusterAccelerationStructureOpTypeNV::eMoveObjects, ClusterAccelerationStructureOpModeNV opMode_ = ClusterAccelerationStructureOpModeNV::eImplicitDestinations, ClusterAccelerationStructureOpInputNV opInput_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxAccelerationStructureCount{ maxAccelerationStructureCount_ } , flags{ flags_ } , opType{ opType_ } , opMode{ opMode_ } , opInput{ opInput_ } { } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV( 
ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureInputInfoNV( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureInputInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureInputInfoNV & operator=( ClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureInputInfoNV & operator=( VkClusterAccelerationStructureInputInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) & VULKAN_HPP_NOEXCEPT { maxAccelerationStructureCount = maxAccelerationStructureCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setMaxAccelerationStructureCount( uint32_t maxAccelerationStructureCount_ ) && VULKAN_HPP_NOEXCEPT { maxAccelerationStructureCount = maxAccelerationStructureCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setFlags( BuildAccelerationStructureFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setFlags( BuildAccelerationStructureFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpType( 
ClusterAccelerationStructureOpTypeNV opType_ ) & VULKAN_HPP_NOEXCEPT { opType = opType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setOpType( ClusterAccelerationStructureOpTypeNV opType_ ) && VULKAN_HPP_NOEXCEPT { opType = opType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpMode( ClusterAccelerationStructureOpModeNV opMode_ ) & VULKAN_HPP_NOEXCEPT { opMode = opMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setOpMode( ClusterAccelerationStructureOpModeNV opMode_ ) && VULKAN_HPP_NOEXCEPT { opMode = opMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV & setOpInput( ClusterAccelerationStructureOpInputNV const & opInput_ ) & VULKAN_HPP_NOEXCEPT { opInput = opInput_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInputInfoNV && setOpInput( ClusterAccelerationStructureOpInputNV const & opInput_ ) && VULKAN_HPP_NOEXCEPT { opInput = opInput_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureInputInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureInputInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureInputInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureInputInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxAccelerationStructureCount, flags, opType, opMode, opInput ); } #endif public: StructureType sType = StructureType::eClusterAccelerationStructureInputInfoNV; void * pNext = {}; uint32_t maxAccelerationStructureCount = {}; BuildAccelerationStructureFlagsKHR flags = {}; 
ClusterAccelerationStructureOpTypeNV opType = ClusterAccelerationStructureOpTypeNV::eMoveObjects; ClusterAccelerationStructureOpModeNV opMode = ClusterAccelerationStructureOpModeNV::eImplicitDestinations; ClusterAccelerationStructureOpInputNV opInput = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureInputInfoNV; }; #endif template <> struct CppType { using Type = ClusterAccelerationStructureInputInfoNV; }; // wrapper struct for struct VkStridedDeviceAddressRegionKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRegionKHR.html struct StridedDeviceAddressRegionKHR { using NativeType = VkStridedDeviceAddressRegionKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( DeviceAddress deviceAddress_ = {}, DeviceSize stride_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : deviceAddress{ deviceAddress_ } , stride{ stride_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : StridedDeviceAddressRegionKHR( *reinterpret_cast( &rhs ) ) { } StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR && setDeviceAddress( 
DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setStride( DeviceSize stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR && setStride( DeviceSize stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkStridedDeviceAddressRegionKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressRegionKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkStridedDeviceAddressRegionKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( deviceAddress, stride, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( StridedDeviceAddressRegionKHR const & ) const = default; #else bool operator==( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( deviceAddress == rhs.deviceAddress ) && ( stride == rhs.stride ) && ( size == rhs.size ); # endif } bool operator!=( StridedDeviceAddressRegionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress deviceAddress = {}; DeviceSize stride = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
<> struct CppType { using Type = StridedDeviceAddressRegionKHR; }; #endif // wrapper struct for struct VkClusterAccelerationStructureCommandsInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureCommandsInfoNV.html struct ClusterAccelerationStructureCommandsInfoNV { using NativeType = VkClusterAccelerationStructureCommandsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eClusterAccelerationStructureCommandsInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureInputInfoNV input_ = {}, DeviceAddress dstImplicitData_ = {}, DeviceAddress scratchData_ = {}, StridedDeviceAddressRegionKHR dstAddressesArray_ = {}, StridedDeviceAddressRegionKHR dstSizesArray_ = {}, StridedDeviceAddressRegionKHR srcInfosArray_ = {}, DeviceAddress srcInfosCount_ = {}, ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , input{ input_ } , dstImplicitData{ dstImplicitData_ } , scratchData{ scratchData_ } , dstAddressesArray{ dstAddressesArray_ } , dstSizesArray{ dstSizesArray_ } , srcInfosArray{ srcInfosArray_ } , srcInfosCount{ srcInfosCount_ } , addressResolutionFlags{ addressResolutionFlags_ } { } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureCommandsInfoNV( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureCommandsInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureCommandsInfoNV & operator=( ClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
ClusterAccelerationStructureCommandsInfoNV & operator=( VkClusterAccelerationStructureCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setInput( ClusterAccelerationStructureInputInfoNV const & input_ ) & VULKAN_HPP_NOEXCEPT { input = input_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setInput( ClusterAccelerationStructureInputInfoNV const & input_ ) && VULKAN_HPP_NOEXCEPT { input = input_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstImplicitData( DeviceAddress dstImplicitData_ ) & VULKAN_HPP_NOEXCEPT { dstImplicitData = dstImplicitData_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setDstImplicitData( DeviceAddress dstImplicitData_ ) && VULKAN_HPP_NOEXCEPT { dstImplicitData = dstImplicitData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setScratchData( DeviceAddress scratchData_ ) & VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setScratchData( DeviceAddress scratchData_ ) && VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstAddressesArray( StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) & VULKAN_HPP_NOEXCEPT { dstAddressesArray = dstAddressesArray_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setDstAddressesArray( StridedDeviceAddressRegionKHR const & dstAddressesArray_ ) && VULKAN_HPP_NOEXCEPT { dstAddressesArray = dstAddressesArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setDstSizesArray( StridedDeviceAddressRegionKHR const & dstSizesArray_ ) & VULKAN_HPP_NOEXCEPT { dstSizesArray = dstSizesArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setDstSizesArray( StridedDeviceAddressRegionKHR const & dstSizesArray_ ) && VULKAN_HPP_NOEXCEPT { dstSizesArray = dstSizesArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setSrcInfosArray( StridedDeviceAddressRegionKHR const & srcInfosArray_ ) & VULKAN_HPP_NOEXCEPT { srcInfosArray = srcInfosArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setSrcInfosArray( StridedDeviceAddressRegionKHR const & srcInfosArray_ ) && VULKAN_HPP_NOEXCEPT { srcInfosArray = srcInfosArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setSrcInfosCount( DeviceAddress srcInfosCount_ ) & VULKAN_HPP_NOEXCEPT { srcInfosCount = srcInfosCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setSrcInfosCount( DeviceAddress srcInfosCount_ ) && VULKAN_HPP_NOEXCEPT { srcInfosCount = srcInfosCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV & setAddressResolutionFlags( ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) & VULKAN_HPP_NOEXCEPT { addressResolutionFlags = addressResolutionFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureCommandsInfoNV && setAddressResolutionFlags( ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags_ ) && VULKAN_HPP_NOEXCEPT 
{ addressResolutionFlags = addressResolutionFlags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureCommandsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureCommandsInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, input, dstImplicitData, scratchData, dstAddressesArray, dstSizesArray, srcInfosArray, srcInfosCount, addressResolutionFlags ); } #endif public: StructureType sType = StructureType::eClusterAccelerationStructureCommandsInfoNV; void * pNext = {}; ClusterAccelerationStructureInputInfoNV input = {}; DeviceAddress dstImplicitData = {}; DeviceAddress scratchData = {}; StridedDeviceAddressRegionKHR dstAddressesArray = {}; StridedDeviceAddressRegionKHR dstSizesArray = {}; StridedDeviceAddressRegionKHR srcInfosArray = {}; DeviceAddress srcInfosCount = {}; ClusterAccelerationStructureAddressResolutionFlagsNV addressResolutionFlags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureCommandsInfoNV; }; #endif template <> struct CppType { using Type = ClusterAccelerationStructureCommandsInfoNV; }; // wrapper struct for struct VkClusterAccelerationStructureGetTemplateIndicesInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureGetTemplateIndicesInfoNV.html struct ClusterAccelerationStructureGetTemplateIndicesInfoNV { using NativeType = VkClusterAccelerationStructureGetTemplateIndicesInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGetTemplateIndicesInfoNV( DeviceAddress clusterTemplateAddress_ = {} ) VULKAN_HPP_NOEXCEPT : clusterTemplateAddress{ clusterTemplateAddress_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureGetTemplateIndicesInfoNV( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureGetTemplateIndicesInfoNV( VkClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureGetTemplateIndicesInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureGetTemplateIndicesInfoNV & operator=( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureGetTemplateIndicesInfoNV & operator=( VkClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGetTemplateIndicesInfoNV & setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) & VULKAN_HPP_NOEXCEPT { clusterTemplateAddress = clusterTemplateAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureGetTemplateIndicesInfoNV && setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) && VULKAN_HPP_NOEXCEPT { clusterTemplateAddress = clusterTemplateAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureGetTemplateIndicesInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( clusterTemplateAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( clusterTemplateAddress == rhs.clusterTemplateAddress ); # endif } bool operator!=( ClusterAccelerationStructureGetTemplateIndicesInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress clusterTemplateAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureGetTemplateIndicesInfoNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureInstantiateClusterInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureInstantiateClusterInfoNV.html struct ClusterAccelerationStructureInstantiateClusterInfoNV { using NativeType = VkClusterAccelerationStructureInstantiateClusterInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureInstantiateClusterInfoNV( uint32_t clusterIdOffset_ = {}, uint32_t geometryIndexOffset_ = {}, uint32_t reserved_ = {}, DeviceAddress clusterTemplateAddress_ = {}, StridedDeviceAddressNV vertexBuffer_ = {} ) VULKAN_HPP_NOEXCEPT : clusterIdOffset{ clusterIdOffset_ } , geometryIndexOffset{ geometryIndexOffset_ } , reserved{ reserved_ } , clusterTemplateAddress{ clusterTemplateAddress_ } , vertexBuffer{ vertexBuffer_ } { } 
VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureInstantiateClusterInfoNV( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureInstantiateClusterInfoNV( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureInstantiateClusterInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureInstantiateClusterInfoNV & operator=( VkClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterIdOffset( uint32_t clusterIdOffset_ ) & VULKAN_HPP_NOEXCEPT { clusterIdOffset = clusterIdOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV && setClusterIdOffset( uint32_t clusterIdOffset_ ) && VULKAN_HPP_NOEXCEPT { clusterIdOffset = clusterIdOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) & VULKAN_HPP_NOEXCEPT { geometryIndexOffset = geometryIndexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV && setGeometryIndexOffset( uint32_t geometryIndexOffset_ ) && VULKAN_HPP_NOEXCEPT { geometryIndexOffset = geometryIndexOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setReserved( uint32_t reserved_ ) & VULKAN_HPP_NOEXCEPT { reserved = reserved_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ClusterAccelerationStructureInstantiateClusterInfoNV && setReserved( uint32_t reserved_ ) && VULKAN_HPP_NOEXCEPT { reserved = reserved_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) & VULKAN_HPP_NOEXCEPT { clusterTemplateAddress = clusterTemplateAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV && setClusterTemplateAddress( DeviceAddress clusterTemplateAddress_ ) && VULKAN_HPP_NOEXCEPT { clusterTemplateAddress = clusterTemplateAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV & setVertexBuffer( StridedDeviceAddressNV const & vertexBuffer_ ) & VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureInstantiateClusterInfoNV && setVertexBuffer( StridedDeviceAddressNV const & vertexBuffer_ ) && VULKAN_HPP_NOEXCEPT { vertexBuffer = vertexBuffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureInstantiateClusterInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureInstantiateClusterInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureInstantiateClusterInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureInstantiateClusterInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( clusterIdOffset, geometryIndexOffset, reserved, clusterTemplateAddress, vertexBuffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureInstantiateClusterInfoNV const & ) const = 
default; #else bool operator==( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( clusterIdOffset == rhs.clusterIdOffset ) && ( geometryIndexOffset == rhs.geometryIndexOffset ) && ( reserved == rhs.reserved ) && ( clusterTemplateAddress == rhs.clusterTemplateAddress ) && ( vertexBuffer == rhs.vertexBuffer ); # endif } bool operator!=( ClusterAccelerationStructureInstantiateClusterInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t clusterIdOffset = {}; uint32_t geometryIndexOffset : 24; uint32_t reserved : 8; DeviceAddress clusterTemplateAddress = {}; StridedDeviceAddressNV vertexBuffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureInstantiateClusterInfoNV; }; #endif // wrapper struct for struct VkClusterAccelerationStructureMoveObjectsInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkClusterAccelerationStructureMoveObjectsInfoNV.html struct ClusterAccelerationStructureMoveObjectsInfoNV { using NativeType = VkClusterAccelerationStructureMoveObjectsInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( DeviceAddress srcAccelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : srcAccelerationStructure{ srcAccelerationStructure_ } { } VULKAN_HPP_CONSTEXPR ClusterAccelerationStructureMoveObjectsInfoNV( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ClusterAccelerationStructureMoveObjectsInfoNV( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ClusterAccelerationStructureMoveObjectsInfoNV( *reinterpret_cast( &rhs ) ) { } ClusterAccelerationStructureMoveObjectsInfoNV & operator=( 
ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ClusterAccelerationStructureMoveObjectsInfoNV & operator=( VkClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV & setSrcAccelerationStructure( DeviceAddress srcAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 ClusterAccelerationStructureMoveObjectsInfoNV && setSrcAccelerationStructure( DeviceAddress srcAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { srcAccelerationStructure = srcAccelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkClusterAccelerationStructureMoveObjectsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkClusterAccelerationStructureMoveObjectsInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ClusterAccelerationStructureMoveObjectsInfoNV const & ) const = default; #else bool operator==( ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcAccelerationStructure == rhs.srcAccelerationStructure ); # endif } bool operator!=( 
ClusterAccelerationStructureMoveObjectsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress srcAccelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ClusterAccelerationStructureMoveObjectsInfoNV; }; #endif // wrapper struct for struct VkCoarseSampleLocationNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleLocationNV.html struct CoarseSampleLocationNV { using NativeType = VkCoarseSampleLocationNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT : pixelX{ pixelX_ } , pixelY{ pixelY_ } , sample{ sample_ } { } VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT : CoarseSampleLocationNV( *reinterpret_cast( &rhs ) ) { } CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) & VULKAN_HPP_NOEXCEPT { pixelX = pixelX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV && setPixelX( uint32_t pixelX_ ) && VULKAN_HPP_NOEXCEPT { pixelX = pixelX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) & VULKAN_HPP_NOEXCEPT { pixelY = pixelY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV && setPixelY( uint32_t pixelY_ ) && VULKAN_HPP_NOEXCEPT { pixelY = 
pixelY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & setSample( uint32_t sample_ ) & VULKAN_HPP_NOEXCEPT { sample = sample_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV && setSample( uint32_t sample_ ) && VULKAN_HPP_NOEXCEPT { sample = sample_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCoarseSampleLocationNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCoarseSampleLocationNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCoarseSampleLocationNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( pixelX, pixelY, sample ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CoarseSampleLocationNV const & ) const = default; #else bool operator==( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( pixelX == rhs.pixelX ) && ( pixelY == rhs.pixelY ) && ( sample == rhs.sample ); # endif } bool operator!=( CoarseSampleLocationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t pixelX = {}; uint32_t pixelY = {}; uint32_t sample = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CoarseSampleLocationNV; }; #endif // wrapper struct for struct VkCoarseSampleOrderCustomNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCoarseSampleOrderCustomNV.html struct CoarseSampleOrderCustomNV { using NativeType = VkCoarseSampleOrderCustomNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( 
ShadingRatePaletteEntryNV shadingRate_ = ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT : shadingRate{ shadingRate_ } , sampleCount{ sampleCount_ } , sampleLocationCount{ sampleLocationCount_ } , pSampleLocations{ pSampleLocations_ } { } VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT : CoarseSampleOrderCustomNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CoarseSampleOrderCustomNV( ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, ArrayProxyNoTemporaries const & sampleLocations_ ) : shadingRate( shadingRate_ ) , sampleCount( sampleCount_ ) , sampleLocationCount( static_cast( sampleLocations_.size() ) ) , pSampleLocations( sampleLocations_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setShadingRate( ShadingRatePaletteEntryNV shadingRate_ ) & VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV && setShadingRate( ShadingRatePaletteEntryNV shadingRate_ ) && VULKAN_HPP_NOEXCEPT { shadingRate = shadingRate_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) & VULKAN_HPP_NOEXCEPT { sampleCount = sampleCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
CoarseSampleOrderCustomNV && setSampleCount( uint32_t sampleCount_ ) && VULKAN_HPP_NOEXCEPT { sampleCount = sampleCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) & VULKAN_HPP_NOEXCEPT { sampleLocationCount = sampleLocationCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV && setSampleLocationCount( uint32_t sampleLocationCount_ ) && VULKAN_HPP_NOEXCEPT { sampleLocationCount = sampleLocationCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & setPSampleLocations( const CoarseSampleLocationNV * pSampleLocations_ ) & VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return *this; } VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV && setPSampleLocations( const CoarseSampleLocationNV * pSampleLocations_ ) && VULKAN_HPP_NOEXCEPT { pSampleLocations = pSampleLocations_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CoarseSampleOrderCustomNV & setSampleLocations( ArrayProxyNoTemporaries const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT { sampleLocationCount = static_cast( sampleLocations_.size() ); pSampleLocations = sampleLocations_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCoarseSampleOrderCustomNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCoarseSampleOrderCustomNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCoarseSampleOrderCustomNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( shadingRate, sampleCount, sampleLocationCount, pSampleLocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( CoarseSampleOrderCustomNV const & ) const = default; #else bool operator==( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( shadingRate == rhs.shadingRate ) && ( sampleCount == rhs.sampleCount ) && ( sampleLocationCount == rhs.sampleLocationCount ) && ( pSampleLocations == rhs.pSampleLocations ); # endif } bool operator!=( CoarseSampleOrderCustomNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ShadingRatePaletteEntryNV shadingRate = ShadingRatePaletteEntryNV::eNoInvocations; uint32_t sampleCount = {}; uint32_t sampleLocationCount = {}; const CoarseSampleLocationNV * pSampleLocations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CoarseSampleOrderCustomNV; }; #endif // wrapper struct for struct VkColorBlendAdvancedEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendAdvancedEXT.html struct ColorBlendAdvancedEXT { using NativeType = VkColorBlendAdvancedEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( BlendOp advancedBlendOp_ = BlendOp::eAdd, Bool32 srcPremultiplied_ = {}, Bool32 dstPremultiplied_ = {}, BlendOverlapEXT blendOverlap_ = BlendOverlapEXT::eUncorrelated, Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT : advancedBlendOp{ advancedBlendOp_ } , srcPremultiplied{ srcPremultiplied_ } , dstPremultiplied{ dstPremultiplied_ } , blendOverlap{ blendOverlap_ } , clampResults{ clampResults_ } { } VULKAN_HPP_CONSTEXPR ColorBlendAdvancedEXT( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ColorBlendAdvancedEXT( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ColorBlendAdvancedEXT( *reinterpret_cast( &rhs ) ) { } ColorBlendAdvancedEXT & operator=( ColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ ColorBlendAdvancedEXT & operator=( VkColorBlendAdvancedEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setAdvancedBlendOp( BlendOp advancedBlendOp_ ) & VULKAN_HPP_NOEXCEPT { advancedBlendOp = advancedBlendOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT && setAdvancedBlendOp( BlendOp advancedBlendOp_ ) && VULKAN_HPP_NOEXCEPT { advancedBlendOp = advancedBlendOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setSrcPremultiplied( Bool32 srcPremultiplied_ ) & VULKAN_HPP_NOEXCEPT { srcPremultiplied = srcPremultiplied_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT && setSrcPremultiplied( Bool32 srcPremultiplied_ ) && VULKAN_HPP_NOEXCEPT { srcPremultiplied = srcPremultiplied_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setDstPremultiplied( Bool32 dstPremultiplied_ ) & VULKAN_HPP_NOEXCEPT { dstPremultiplied = dstPremultiplied_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT && setDstPremultiplied( Bool32 dstPremultiplied_ ) && VULKAN_HPP_NOEXCEPT { dstPremultiplied = dstPremultiplied_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setBlendOverlap( BlendOverlapEXT blendOverlap_ ) & VULKAN_HPP_NOEXCEPT { blendOverlap = blendOverlap_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT && setBlendOverlap( BlendOverlapEXT blendOverlap_ ) && VULKAN_HPP_NOEXCEPT { blendOverlap = blendOverlap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT & setClampResults( Bool32 clampResults_ ) & VULKAN_HPP_NOEXCEPT { clampResults = clampResults_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendAdvancedEXT && setClampResults( Bool32 clampResults_ ) && VULKAN_HPP_NOEXCEPT { clampResults = clampResults_; return 
std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkColorBlendAdvancedEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkColorBlendAdvancedEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkColorBlendAdvancedEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkColorBlendAdvancedEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( advancedBlendOp, srcPremultiplied, dstPremultiplied, blendOverlap, clampResults ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ColorBlendAdvancedEXT const & ) const = default; #else bool operator==( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( advancedBlendOp == rhs.advancedBlendOp ) && ( srcPremultiplied == rhs.srcPremultiplied ) && ( dstPremultiplied == rhs.dstPremultiplied ) && ( blendOverlap == rhs.blendOverlap ) && ( clampResults == rhs.clampResults ); # endif } bool operator!=( ColorBlendAdvancedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: BlendOp advancedBlendOp = BlendOp::eAdd; Bool32 srcPremultiplied = {}; Bool32 dstPremultiplied = {}; BlendOverlapEXT blendOverlap = BlendOverlapEXT::eUncorrelated; Bool32 clampResults = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ColorBlendAdvancedEXT; }; #endif // wrapper struct for struct VkColorBlendEquationEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkColorBlendEquationEXT.html struct ColorBlendEquationEXT { using NativeType = VkColorBlendEquationEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( BlendFactor srcColorBlendFactor_ = 
BlendFactor::eZero, BlendFactor dstColorBlendFactor_ = BlendFactor::eZero, BlendOp colorBlendOp_ = BlendOp::eAdd, BlendFactor srcAlphaBlendFactor_ = BlendFactor::eZero, BlendFactor dstAlphaBlendFactor_ = BlendFactor::eZero, BlendOp alphaBlendOp_ = BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT : srcColorBlendFactor{ srcColorBlendFactor_ } , dstColorBlendFactor{ dstColorBlendFactor_ } , colorBlendOp{ colorBlendOp_ } , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } , alphaBlendOp{ alphaBlendOp_ } { } VULKAN_HPP_CONSTEXPR ColorBlendEquationEXT( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ColorBlendEquationEXT( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ColorBlendEquationEXT( *reinterpret_cast( &rhs ) ) { } ColorBlendEquationEXT & operator=( ColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ColorBlendEquationEXT & operator=( VkColorBlendEquationEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & 
setColorBlendOp( BlendOp colorBlendOp_ ) & VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setColorBlendOp( BlendOp colorBlendOp_ ) && VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT & setAlphaBlendOp( BlendOp alphaBlendOp_ ) & VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 ColorBlendEquationEXT && setAlphaBlendOp( BlendOp alphaBlendOp_ ) && VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkColorBlendEquationEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkColorBlendEquationEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkColorBlendEquationEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkColorBlendEquationEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcColorBlendFactor, dstColorBlendFactor, 
colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ColorBlendEquationEXT const & ) const = default; #else bool operator==( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcColorBlendFactor == rhs.srcColorBlendFactor ) && ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) && ( alphaBlendOp == rhs.alphaBlendOp ); # endif } bool operator!=( ColorBlendEquationEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: BlendFactor srcColorBlendFactor = BlendFactor::eZero; BlendFactor dstColorBlendFactor = BlendFactor::eZero; BlendOp colorBlendOp = BlendOp::eAdd; BlendFactor srcAlphaBlendFactor = BlendFactor::eZero; BlendFactor dstAlphaBlendFactor = BlendFactor::eZero; BlendOp alphaBlendOp = BlendOp::eAdd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ColorBlendEquationEXT; }; #endif // wrapper struct for struct VkCommandBufferAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferAllocateInfo.html struct CommandBufferAllocateInfo { using NativeType = VkCommandBufferAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandPool commandPool_ = {}, CommandBufferLevel level_ = CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , commandPool{ commandPool_ } , level{ level_ } , 
commandBufferCount{ commandBufferCount_ } { } VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferAllocateInfo( *reinterpret_cast( &rhs ) ) { } CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandPool( CommandPool commandPool_ ) & VULKAN_HPP_NOEXCEPT { commandPool = commandPool_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo && setCommandPool( CommandPool commandPool_ ) && VULKAN_HPP_NOEXCEPT { commandPool = commandPool_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setLevel( CommandBufferLevel level_ ) & VULKAN_HPP_NOEXCEPT { level = level_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo && setLevel( CommandBufferLevel level_ ) && VULKAN_HPP_NOEXCEPT { level = level_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) & VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo && setCommandBufferCount( uint32_t commandBufferCount_ ) && VULKAN_HPP_NOEXCEPT { commandBufferCount = 
commandBufferCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, commandPool, level, commandBufferCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferAllocateInfo const & ) const = default; #else bool operator==( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandPool == rhs.commandPool ) && ( level == rhs.level ) && ( commandBufferCount == rhs.commandBufferCount ); # endif } bool operator!=( CommandBufferAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferAllocateInfo; const void * pNext = {}; CommandPool commandPool = {}; CommandBufferLevel level = CommandBufferLevel::ePrimary; uint32_t commandBufferCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferAllocateInfo; }; #endif template <> struct CppType { using Type = CommandBufferAllocateInfo; }; // wrapper struct for struct VkCommandBufferInheritanceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceInfo.html struct CommandBufferInheritanceInfo { using NativeType = VkCommandBufferInheritanceInfo; static const bool allowDuplicate = false; static 
// NOTE(review): generated-header fragment (tail of vk::CommandBufferInheritanceInfo: constructors,
// assignment, chainable setters, conversions, comparison, members, CppType specializations).
// Formatting has been collapsed and template argument lists (e.g. reinterpret_cast<...>, CppType<...>)
// appear stripped by extraction; regenerate this header from the Vulkan XML registry rather than hand-editing.
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( RenderPass renderPass_ = {}, uint32_t subpass_ = {}, Framebuffer framebuffer_ = {}, Bool32 occlusionQueryEnable_ = {}, QueryControlFlags queryFlags_ = {}, QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , renderPass{ renderPass_ } , subpass{ subpass_ } , framebuffer{ framebuffer_ } , occlusionQueryEnable{ occlusionQueryEnable_ } , queryFlags{ queryFlags_ } , pipelineStatistics{ pipelineStatistics_ } { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceInfo( *reinterpret_cast( &rhs ) ) { } CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setRenderPass( RenderPass renderPass_ ) & VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setRenderPass( RenderPass renderPass_ ) && VULKAN_HPP_NOEXCEPT { 
// remaining rvalue-qualified setters (return std::move(*this) to support chaining on temporaries)
renderPass = renderPass_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) & VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setSubpass( uint32_t subpass_ ) && VULKAN_HPP_NOEXCEPT { subpass = subpass_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setFramebuffer( Framebuffer framebuffer_ ) & VULKAN_HPP_NOEXCEPT { framebuffer = framebuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setFramebuffer( Framebuffer framebuffer_ ) && VULKAN_HPP_NOEXCEPT { framebuffer = framebuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ ) & VULKAN_HPP_NOEXCEPT { occlusionQueryEnable = occlusionQueryEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setOcclusionQueryEnable( Bool32 occlusionQueryEnable_ ) && VULKAN_HPP_NOEXCEPT { occlusionQueryEnable = occlusionQueryEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setQueryFlags( QueryControlFlags queryFlags_ ) & VULKAN_HPP_NOEXCEPT { queryFlags = queryFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setQueryFlags( QueryControlFlags queryFlags_ ) && VULKAN_HPP_NOEXCEPT { queryFlags = queryFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) & VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo && setPipelineStatistics( QueryPipelineStatisticFlags pipelineStatistics_ ) && VULKAN_HPP_NOEXCEPT { pipelineStatistics = pipelineStatistics_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferInheritanceInfo const &() const 
// conversions to the native Vk struct, optional reflect()/comparison support, member defaults, CppType mapping
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, renderPass, subpass, framebuffer, occlusionQueryEnable, queryFlags, pipelineStatistics ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferInheritanceInfo const & ) const = default; #else bool operator==( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( renderPass == rhs.renderPass ) && ( subpass == rhs.subpass ) && ( framebuffer == rhs.framebuffer ) && ( occlusionQueryEnable == rhs.occlusionQueryEnable ) && ( queryFlags == rhs.queryFlags ) && ( pipelineStatistics == rhs.pipelineStatistics ); # endif } bool operator!=( CommandBufferInheritanceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceInfo; const void * pNext = {}; RenderPass renderPass = {}; uint32_t subpass = {}; Framebuffer framebuffer = {}; Bool32 occlusionQueryEnable = {}; QueryControlFlags queryFlags = {}; QueryPipelineStatisticFlags pipelineStatistics = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferInheritanceInfo; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceInfo; }; // wrapper struct for struct VkCommandBufferBeginInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferBeginInfo.html struct 
// NOTE(review): vk::CommandBufferBeginInfo — wraps VkCommandBufferBeginInfo (flags + pInheritanceInfo).
// Generated code with collapsed formatting and stripped template arguments; regenerate rather than hand-edit.
CommandBufferBeginInfo { using NativeType = VkCommandBufferBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferUsageFlags flags_ = {}, const CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pInheritanceInfo{ pInheritanceInfo_ } { } VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) { } CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setFlags( CommandBufferUsageFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo && setFlags( CommandBufferUsageFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & setPInheritanceInfo( const CommandBufferInheritanceInfo * pInheritanceInfo_ ) & VULKAN_HPP_NOEXCEPT { pInheritanceInfo = 
// conversions, optional reflect()/comparison, members; then the generated comment/opening for the next struct
pInheritanceInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo && setPInheritanceInfo( const CommandBufferInheritanceInfo * pInheritanceInfo_ ) && VULKAN_HPP_NOEXCEPT { pInheritanceInfo = pInheritanceInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pInheritanceInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferBeginInfo const & ) const = default; #else bool operator==( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pInheritanceInfo == rhs.pInheritanceInfo ); # endif } bool operator!=( CommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferBeginInfo; const void * pNext = {}; CommandBufferUsageFlags flags = {}; const CommandBufferInheritanceInfo * pInheritanceInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferBeginInfo; }; #endif template <> struct CppType { using Type = CommandBufferBeginInfo; }; // wrapper struct for struct VkCommandBufferInheritanceConditionalRenderingInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html struct 
// NOTE(review): vk::CommandBufferInheritanceConditionalRenderingInfoEXT — single Bool32 payload
// (conditionalRenderingEnable). Generated, collapsed, template arguments stripped; do not hand-edit.
CommandBufferInheritanceConditionalRenderingInfoEXT { using NativeType = VkCommandBufferInheritanceConditionalRenderingInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( Bool32 conditionalRenderingEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , conditionalRenderingEnable{ conditionalRenderingEnable_ } { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast( &rhs ) ) { } CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( Bool32 
// setter continuation, conversions, optional reflect()/comparison, members, start of CppType mapping
conditionalRenderingEnable_ ) & VULKAN_HPP_NOEXCEPT { conditionalRenderingEnable = conditionalRenderingEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT && setConditionalRenderingEnable( Bool32 conditionalRenderingEnable_ ) && VULKAN_HPP_NOEXCEPT { conditionalRenderingEnable = conditionalRenderingEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceConditionalRenderingInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, conditionalRenderingEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const & ) const = default; #else bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable ); # endif } bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT; const void * pNext = {}; Bool32 conditionalRenderingEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
// NOTE(review): vk::CommandBufferInheritanceDescriptorHeapInfoEXT — carries two BindHeapInfoEXT
// pointers (sampler heap / resource heap). Generated, collapsed, template arguments stripped; regenerate instead of editing.
CommandBufferInheritanceConditionalRenderingInfoEXT; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceConditionalRenderingInfoEXT; }; // wrapper struct for struct VkCommandBufferInheritanceDescriptorHeapInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceDescriptorHeapInfoEXT.html struct CommandBufferInheritanceDescriptorHeapInfoEXT { using NativeType = VkCommandBufferInheritanceDescriptorHeapInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceDescriptorHeapInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceDescriptorHeapInfoEXT( const BindHeapInfoEXT * pSamplerHeapBindInfo_ = {}, const BindHeapInfoEXT * pResourceHeapBindInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pSamplerHeapBindInfo{ pSamplerHeapBindInfo_ } , pResourceHeapBindInfo{ pResourceHeapBindInfo_ } { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceDescriptorHeapInfoEXT( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceDescriptorHeapInfoEXT( VkCommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceDescriptorHeapInfoEXT( *reinterpret_cast( &rhs ) ) { } CommandBufferInheritanceDescriptorHeapInfoEXT & operator=( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceDescriptorHeapInfoEXT & operator=( VkCommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & 
// chainable setters for pNext / pSamplerHeapBindInfo / pResourceHeapBindInfo, conversions, optional reflect()
setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & setPSamplerHeapBindInfo( const BindHeapInfoEXT * pSamplerHeapBindInfo_ ) & VULKAN_HPP_NOEXCEPT { pSamplerHeapBindInfo = pSamplerHeapBindInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPSamplerHeapBindInfo( const BindHeapInfoEXT * pSamplerHeapBindInfo_ ) && VULKAN_HPP_NOEXCEPT { pSamplerHeapBindInfo = pSamplerHeapBindInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT & setPResourceHeapBindInfo( const BindHeapInfoEXT * pResourceHeapBindInfo_ ) & VULKAN_HPP_NOEXCEPT { pResourceHeapBindInfo = pResourceHeapBindInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceDescriptorHeapInfoEXT && setPResourceHeapBindInfo( const BindHeapInfoEXT * pResourceHeapBindInfo_ ) && VULKAN_HPP_NOEXCEPT { pResourceHeapBindInfo = pResourceHeapBindInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferInheritanceDescriptorHeapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceDescriptorHeapInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceDescriptorHeapInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceDescriptorHeapInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pSamplerHeapBindInfo, pResourceHeapBindInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
// NOTE(review): tail of the DescriptorHeapInfoEXT comparisons/members, then
// vk::CommandBufferInheritanceRenderPassTransformInfoQCOM (transform + renderArea).
// Generated, collapsed, template arguments stripped; regenerate from vk.xml instead of editing.
CommandBufferInheritanceDescriptorHeapInfoEXT const & ) const = default; #else bool operator==( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pSamplerHeapBindInfo == rhs.pSamplerHeapBindInfo ) && ( pResourceHeapBindInfo == rhs.pResourceHeapBindInfo ); # endif } bool operator!=( CommandBufferInheritanceDescriptorHeapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceDescriptorHeapInfoEXT; const void * pNext = {}; const BindHeapInfoEXT * pSamplerHeapBindInfo = {}; const BindHeapInfoEXT * pResourceHeapBindInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferInheritanceDescriptorHeapInfoEXT; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceDescriptorHeapInfoEXT; }; // wrapper struct for struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html struct CommandBufferInheritanceRenderPassTransformInfoQCOM { using NativeType = VkCommandBufferInheritanceRenderPassTransformInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, Rect2D renderArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , transform{ transform_ } , renderArea{ renderArea_ } { } VULKAN_HPP_CONSTEXPR 
// copy/native constructors, assignment, chainable setters (setRenderArea takes Rect2D by const &)
CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast( &rhs ) ) { } CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( SurfaceTransformFlagBitsKHR transform_ ) & VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM && setTransform( SurfaceTransformFlagBitsKHR transform_ ) && VULKAN_HPP_NOEXCEPT { transform = transform_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( Rect2D const & renderArea_ ) & VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM && setRenderArea( Rect2D const & renderArea_ ) && VULKAN_HPP_NOEXCEPT { renderArea = renderArea_; return 
// NOTE(review): tail of the QCOM struct, then vk::CommandBufferInheritanceRenderingInfo (dynamic
// rendering inheritance: flags, viewMask, color/depth/stencil formats, rasterizationSamples) including
// an enhanced-mode ArrayProxy convenience constructor/setter. Generated, collapsed, template
// arguments stripped; regenerate this header rather than hand-editing.
std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, transform, renderArea ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const & ) const = default; #else bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ) && ( renderArea == rhs.renderArea ); # endif } bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM; const void * pNext = {}; SurfaceTransformFlagBitsKHR transform = SurfaceTransformFlagBitsKHR::eIdentity; Rect2D renderArea = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM; }; // wrapper struct for struct VkCommandBufferInheritanceRenderingInfo, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceRenderingInfo.html struct CommandBufferInheritanceRenderingInfo { using NativeType = VkCommandBufferInheritanceRenderingInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderingInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( RenderingFlags flags_ = {}, uint32_t viewMask_ = {}, uint32_t colorAttachmentCount_ = {}, const Format * pColorAttachmentFormats_ = {}, Format depthAttachmentFormat_ = Format::eUndefined, Format stencilAttachmentFormat_ = Format::eUndefined, SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , viewMask{ viewMask_ } , colorAttachmentCount{ colorAttachmentCount_ } , pColorAttachmentFormats{ pColorAttachmentFormats_ } , depthAttachmentFormat{ depthAttachmentFormat_ } , stencilAttachmentFormat{ stencilAttachmentFormat_ } , rasterizationSamples{ rasterizationSamples_ } { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderingInfo( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceRenderingInfo( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceRenderingInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CommandBufferInheritanceRenderingInfo( RenderingFlags flags_, uint32_t viewMask_, ArrayProxyNoTemporaries const & colorAttachmentFormats_, Format depthAttachmentFormat_ = Format::eUndefined, Format stencilAttachmentFormat_ = Format::eUndefined, SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , viewMask( viewMask_ ) , 
// ArrayProxy ctor derives colorAttachmentCount/pColorAttachmentFormats from the proxy (count + data)
colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) , pColorAttachmentFormats( colorAttachmentFormats_.data() ) , depthAttachmentFormat( depthAttachmentFormat_ ) , stencilAttachmentFormat( stencilAttachmentFormat_ ) , rasterizationSamples( rasterizationSamples_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CommandBufferInheritanceRenderingInfo & operator=( CommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceRenderingInfo & operator=( VkCommandBufferInheritanceRenderingInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setFlags( RenderingFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setFlags( RenderingFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setViewMask( uint32_t viewMask_ ) & VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setViewMask( uint32_t viewMask_ ) && VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) & VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
CommandBufferInheritanceRenderingInfo && setColorAttachmentCount( uint32_t colorAttachmentCount_ ) && VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) & VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) && VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CommandBufferInheritanceRenderingInfo & setColorAttachmentFormats( ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); pColorAttachmentFormats = colorAttachmentFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) & VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setDepthAttachmentFormat( Format depthAttachmentFormat_ ) && VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) & VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) && VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo 
& setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) & VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderingInfo && setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) && VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferInheritanceRenderingInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderingInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderingInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceRenderingInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, viewMask, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat, rasterizationSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferInheritanceRenderingInfo const & ) const = default; #else bool operator==( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewMask == rhs.viewMask ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ) && ( rasterizationSamples == rhs.rasterizationSamples ); # endif } bool operator!=( CommandBufferInheritanceRenderingInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
// NOTE(review): tail of CommandBufferInheritanceRenderingInfo (members, CppType, KHR alias), then
// vk::Viewport — a plain POD wrapper over VkViewport (x, y, width, height, minDepth, maxDepth),
// no sType/pNext. Generated, collapsed, template arguments stripped; regenerate rather than hand-edit.
!operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceRenderingInfo; const void * pNext = {}; RenderingFlags flags = {}; uint32_t viewMask = {}; uint32_t colorAttachmentCount = {}; const Format * pColorAttachmentFormats = {}; Format depthAttachmentFormat = Format::eUndefined; Format stencilAttachmentFormat = Format::eUndefined; SampleCountFlagBits rasterizationSamples = SampleCountFlagBits::e1; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferInheritanceRenderingInfo; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceRenderingInfo; }; using CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo; // wrapper struct for struct VkViewport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkViewport.html struct Viewport { using NativeType = VkViewport; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } , width{ width_ } , height{ height_ } , minDepth{ minDepth_ } , maxDepth{ maxDepth_ } { } VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT : Viewport( *reinterpret_cast( &rhs ) ) {} Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 Viewport & setX( float x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setX( float x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } 
// remaining Viewport setters, conversions, optional reflect()/comparison support
VULKAN_HPP_CONSTEXPR_14 Viewport & setY( float y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setY( float y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Viewport & setWidth( float width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setWidth( float width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Viewport & setHeight( float height_ ) & VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setHeight( float height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Viewport & setMinDepth( float minDepth_ ) & VULKAN_HPP_NOEXCEPT { minDepth = minDepth_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setMinDepth( float minDepth_ ) && VULKAN_HPP_NOEXCEPT { minDepth = minDepth_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 Viewport & setMaxDepth( float maxDepth_ ) & VULKAN_HPP_NOEXCEPT { maxDepth = maxDepth_; return *this; } VULKAN_HPP_CONSTEXPR_14 Viewport && setMaxDepth( float maxDepth_ ) && VULKAN_HPP_NOEXCEPT { maxDepth = maxDepth_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkViewport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkViewport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkViewport const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkViewport *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( x, y, width, height, minDepth, maxDepth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( Viewport const & ) const = default; #else bool operator==( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
// NOTE(review): tail of Viewport (comparison + members), then
// vk::CommandBufferInheritanceViewportScissorInfoNV (viewportScissor2D, viewportDepthCount,
// pViewportDepths), ending at the opening of the next wrapper struct. Generated, collapsed,
// template arguments stripped; regenerate from the Vulkan XML registry rather than hand-editing.
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ) && ( width == rhs.width ) && ( height == rhs.height ) && ( minDepth == rhs.minDepth ) && ( maxDepth == rhs.maxDepth ); # endif } bool operator!=( Viewport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float x = {}; float y = {}; float width = {}; float height = {}; float minDepth = {}; float maxDepth = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = Viewport; }; #endif // wrapper struct for struct VkCommandBufferInheritanceViewportScissorInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferInheritanceViewportScissorInfoNV.html struct CommandBufferInheritanceViewportScissorInfoNV { using NativeType = VkCommandBufferInheritanceViewportScissorInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( Bool32 viewportScissor2D_ = {}, uint32_t viewportDepthCount_ = {}, const Viewport * pViewportDepths_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , viewportScissor2D{ viewportScissor2D_ } , viewportDepthCount{ viewportDepthCount_ } , pViewportDepths{ pViewportDepths_ } { } VULKAN_HPP_CONSTEXPR CommandBufferInheritanceViewportScissorInfoNV( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferInheritanceViewportScissorInfoNV( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferInheritanceViewportScissorInfoNV( *reinterpret_cast( &rhs ) ) { } CommandBufferInheritanceViewportScissorInfoNV & operator=( CommandBufferInheritanceViewportScissorInfoNV const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferInheritanceViewportScissorInfoNV & operator=( VkCommandBufferInheritanceViewportScissorInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportScissor2D( Bool32 viewportScissor2D_ ) & VULKAN_HPP_NOEXCEPT { viewportScissor2D = viewportScissor2D_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV && setViewportScissor2D( Bool32 viewportScissor2D_ ) && VULKAN_HPP_NOEXCEPT { viewportScissor2D = viewportScissor2D_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setViewportDepthCount( uint32_t viewportDepthCount_ ) & VULKAN_HPP_NOEXCEPT { viewportDepthCount = viewportDepthCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV && setViewportDepthCount( uint32_t viewportDepthCount_ ) && VULKAN_HPP_NOEXCEPT { viewportDepthCount = viewportDepthCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV & setPViewportDepths( const Viewport * pViewportDepths_ ) & VULKAN_HPP_NOEXCEPT { pViewportDepths = pViewportDepths_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceViewportScissorInfoNV && setPViewportDepths( const Viewport * pViewportDepths_ ) && VULKAN_HPP_NOEXCEPT { pViewportDepths = pViewportDepths_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ 
operator VkCommandBufferInheritanceViewportScissorInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceViewportScissorInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferInheritanceViewportScissorInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferInheritanceViewportScissorInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, viewportScissor2D, viewportDepthCount, pViewportDepths ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferInheritanceViewportScissorInfoNV const & ) const = default; #else bool operator==( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewportScissor2D == rhs.viewportScissor2D ) && ( viewportDepthCount == rhs.viewportDepthCount ) && ( pViewportDepths == rhs.pViewportDepths ); # endif } bool operator!=( CommandBufferInheritanceViewportScissorInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferInheritanceViewportScissorInfoNV; const void * pNext = {}; Bool32 viewportScissor2D = {}; uint32_t viewportDepthCount = {}; const Viewport * pViewportDepths = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferInheritanceViewportScissorInfoNV; }; #endif template <> struct CppType { using Type = CommandBufferInheritanceViewportScissorInfoNV; }; // wrapper struct for struct VkCommandBufferSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandBufferSubmitInfo.html struct 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )", "std::tuple reflect()", "template <> struct CppType { ... }");
// code kept byte-identical — regenerate from the Vulkan-Hpp generator. TODO confirm.
// CommandBufferSubmitInfo (completes the dangling "struct" keyword from the previous line):
// sType/pNext-chained struct holding a CommandBuffer handle and a deviceMask, with the standard
// generated shape — constexpr member-wise ctor, defaulted copy ops, native-struct conversion ctor,
// and ref-qualified fluent setters.
CommandBufferSubmitInfo { using NativeType = VkCommandBufferSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferSubmitInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , commandBuffer{ commandBuffer_ } , deviceMask{ deviceMask_ } { } VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandBufferSubmitInfo( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandBufferSubmitInfo( *reinterpret_cast( &rhs ) ) { } CommandBufferSubmitInfo & operator=( CommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandBufferSubmitInfo & operator=( VkCommandBufferSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setCommandBuffer( CommandBuffer commandBuffer_ ) & VULKAN_HPP_NOEXCEPT { commandBuffer = commandBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo && setCommandBuffer( CommandBuffer commandBuffer_ ) && VULKAN_HPP_NOEXCEPT { commandBuffer = commandBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo & setDeviceMask( uint32_t deviceMask_ ) & VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } 
// CommandBufferSubmitInfo tail: rvalue setDeviceMask, native conversion operators, reflect(),
// defaulted <=> or ==/!= fallback, members, CppType specializations, and the KHR alias
// ( CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo ). The line ends inside the head of
// CommandPoolCreateInfo, whose declaration continues on the following line.
VULKAN_HPP_CONSTEXPR_14 CommandBufferSubmitInfo && setDeviceMask( uint32_t deviceMask_ ) && VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandBufferSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandBufferSubmitInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandBufferSubmitInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, commandBuffer, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandBufferSubmitInfo const & ) const = default; #else bool operator==( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBuffer == rhs.commandBuffer ) && ( deviceMask == rhs.deviceMask ); # endif } bool operator!=( CommandBufferSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandBufferSubmitInfo; const void * pNext = {}; CommandBuffer commandBuffer = {}; uint32_t deviceMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandBufferSubmitInfo; }; #endif template <> struct CppType { using Type = CommandBufferSubmitInfo; }; using CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo; // wrapper struct for struct VkCommandPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCommandPoolCreateInfo.html struct CommandPoolCreateInfo { using NativeType = VkCommandPoolCreateInfo; static const bool allowDuplicate = false; static 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )", "std::tuple reflect()", "template <> struct CppType { ... }");
// code kept byte-identical — regenerate from the Vulkan-Hpp generator. TODO confirm.
// Body of CommandPoolCreateInfo (declaration head is on the previous line): sType/pNext chain,
// CommandPoolCreateFlags flags, uint32_t queueFamilyIndex; constexpr member-wise ctor, defaulted
// copy ops, conversion/assignment from VkCommandPoolCreateInfo, and ref-qualified fluent setters
// setPNext / setFlags / setQueueFamilyIndex.
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , queueFamilyIndex{ queueFamilyIndex_ } { } VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CommandPoolCreateInfo( *reinterpret_cast( &rhs ) ) { } CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setFlags( CommandPoolCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo && setFlags( CommandPoolCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo && setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return 
// CommandPoolCreateInfo tail: native conversion operators, reflect(), defaulted <=> or ==/!=
// fallback, members, and CppType specializations; then the head of the wrapper for
// VkComputeOccupancyPriorityParametersNV. The trailing "&&" continues a #if condition onto the
// next line — a pre-existing artifact of the collapsed formatting (see NOTE).
std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCommandPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCommandPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCommandPoolCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, queueFamilyIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CommandPoolCreateInfo const & ) const = default; #else bool operator==( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ); # endif } bool operator!=( CommandPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCommandPoolCreateInfo; const void * pNext = {}; CommandPoolCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CommandPoolCreateInfo; }; #endif template <> struct CppType { using Type = CommandPoolCreateInfo; }; // wrapper struct for struct VkComputeOccupancyPriorityParametersNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputeOccupancyPriorityParametersNV.html struct ComputeOccupancyPriorityParametersNV { using NativeType = VkComputeOccupancyPriorityParametersNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputeOccupancyPriorityParametersNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ComputeOccupancyPriorityParametersNV( float occupancyPriority_ = {}, float occupancyThrottling_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , occupancyPriority{ occupancyPriority_ } , occupancyThrottling{ occupancyThrottling_ } { } VULKAN_HPP_CONSTEXPR ComputeOccupancyPriorityParametersNV( ComputeOccupancyPriorityParametersNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComputeOccupancyPriorityParametersNV( VkComputeOccupancyPriorityParametersNV const & rhs ) VULKAN_HPP_NOEXCEPT : ComputeOccupancyPriorityParametersNV( *reinterpret_cast( &rhs ) ) { } ComputeOccupancyPriorityParametersNV & operator=( ComputeOccupancyPriorityParametersNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ComputeOccupancyPriorityParametersNV & operator=( VkComputeOccupancyPriorityParametersNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV & setOccupancyPriority( float occupancyPriority_ ) & VULKAN_HPP_NOEXCEPT { occupancyPriority = occupancyPriority_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV && setOccupancyPriority( float occupancyPriority_ ) && VULKAN_HPP_NOEXCEPT { occupancyPriority = occupancyPriority_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV & setOccupancyThrottling( float occupancyThrottling_ ) & VULKAN_HPP_NOEXCEPT { occupancyThrottling = occupancyThrottling_; return *this; } 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )" above, "std::tuple reflect()" below); code kept byte-identical —
// regenerate from the Vulkan-Hpp generator. TODO confirm.
// Above: body of ComputeOccupancyPriorityParametersNV (continues a #if condition split across the
// preceding line) — constexpr ctor over ( occupancyPriority, occupancyThrottling, pNext ), defaulted
// copy ops, native-struct conversion/assignment, and ref-qualified fluent setters.
// Below: the struct's tail — conversion operators, reflect(), defaulted <=> or ==/!= fallback,
// members, CppType specializations — ending inside the comment that introduces the
// VkSpecializationMapEntry wrapper (the comment's URL spills onto the next line; pre-existing
// collapsed-formatting artifact).
VULKAN_HPP_CONSTEXPR_14 ComputeOccupancyPriorityParametersNV && setOccupancyThrottling( float occupancyThrottling_ ) && VULKAN_HPP_NOEXCEPT { occupancyThrottling = occupancyThrottling_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkComputeOccupancyPriorityParametersNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComputeOccupancyPriorityParametersNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComputeOccupancyPriorityParametersNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkComputeOccupancyPriorityParametersNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, occupancyPriority, occupancyThrottling ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ComputeOccupancyPriorityParametersNV const & ) const = default; #else bool operator==( ComputeOccupancyPriorityParametersNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( occupancyPriority == rhs.occupancyPriority ) && ( occupancyThrottling == rhs.occupancyThrottling ); # endif } bool operator!=( ComputeOccupancyPriorityParametersNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eComputeOccupancyPriorityParametersNV; const void * pNext = {}; float occupancyPriority = {}; float occupancyThrottling = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ComputeOccupancyPriorityParametersNV; }; #endif template <> struct CppType { using Type = ComputeOccupancyPriorityParametersNV; }; // wrapper struct for struct VkSpecializationMapEntry, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationMapEntry.html struct SpecializationMapEntry { using NativeType = VkSpecializationMapEntry; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT : constantID{ constantID_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT : SpecializationMapEntry( *reinterpret_cast( &rhs ) ) { } SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setConstantID( uint32_t constantID_ ) & VULKAN_HPP_NOEXCEPT { constantID = constantID_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry && setConstantID( uint32_t constantID_ ) && VULKAN_HPP_NOEXCEPT { constantID = constantID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & setSize( size_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry && setSize( size_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )" above, "std::tuple reflect()" below); code kept byte-identical —
// regenerate from the Vulkan-Hpp generator. TODO confirm. (The bare URL opening the previous line
// spilled out of the "// wrapper struct ..." comment — same pre-existing artifact.)
// Above: SpecializationMapEntry — a plain (no sType/pNext) wrapper over VkSpecializationMapEntry
// with members constantID, offset, size, the usual ctors/assignments, and ref-qualified setters.
// Below: its tail (conversion operators, reflect(), defaulted <=> or ==/!= fallback, members,
// C++20-guarded CppType), then the head of SpecializationInfo — also plain, holding
// ( mapEntryCount, pMapEntries, dataSize, pData ) — whose native-struct conversion ctor is split
// onto the next line.
operator VkSpecializationMapEntry const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSpecializationMapEntry const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSpecializationMapEntry *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( constantID, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SpecializationMapEntry const & ) const = default; #else bool operator==( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( constantID == rhs.constantID ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( SpecializationMapEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t constantID = {}; uint32_t offset = {}; size_t size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SpecializationMapEntry; }; #endif // wrapper struct for struct VkSpecializationInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSpecializationInfo.html struct SpecializationInfo { using NativeType = VkSpecializationInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {}, const SpecializationMapEntry * pMapEntries_ = {}, size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT : mapEntryCount{ mapEntryCount_ } , pMapEntries{ pMapEntries_ } , dataSize{ dataSize_ } , pData{ pData_ } { } VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SpecializationInfo( VkSpecializationInfo const 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region —
// here also from the enhanced-mode helpers (e.g. "template SpecializationInfo(
// ArrayProxyNoTemporaries const & ... )" has lost both the template parameter list and the proxy's
// element types); code kept byte-identical — regenerate from the Vulkan-Hpp generator. TODO confirm.
// Body of SpecializationInfo (ctor head is on the previous line): native-struct conversion ctor;
// an enhanced-mode convenience ctor taking ArrayProxyNoTemporaries spans that derives
// mapEntryCount / pMapEntries / dataSize ( data_.size() * sizeof( T ) ) / pData; defaulted copy
// assignment; assignment from VkSpecializationInfo; and fluent setters, including the
// enhanced-mode setMapEntries which sets count and pointer together from one proxy.
& rhs ) VULKAN_HPP_NOEXCEPT : SpecializationInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template SpecializationInfo( ArrayProxyNoTemporaries const & mapEntries_, ArrayProxyNoTemporaries const & data_ = {} ) : mapEntryCount( static_cast( mapEntries_.size() ) ) , pMapEntries( mapEntries_.data() ) , dataSize( data_.size() * sizeof( T ) ) , pData( data_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) & VULKAN_HPP_NOEXCEPT { mapEntryCount = mapEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo && setMapEntryCount( uint32_t mapEntryCount_ ) && VULKAN_HPP_NOEXCEPT { mapEntryCount = mapEntryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPMapEntries( const SpecializationMapEntry * pMapEntries_ ) & VULKAN_HPP_NOEXCEPT { pMapEntries = pMapEntries_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo && setPMapEntries( const SpecializationMapEntry * pMapEntries_ ) && VULKAN_HPP_NOEXCEPT { pMapEntries = pMapEntries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) SpecializationInfo & setMapEntries( ArrayProxyNoTemporaries const & mapEntries_ ) VULKAN_HPP_NOEXCEPT { mapEntryCount = static_cast( mapEntries_.size() ); pMapEntries = mapEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setDataSize( size_t dataSize_ ) & VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo && 
// SpecializationInfo tail: remaining setters (setDataSize rvalue, setPData, enhanced-mode setData
// which derives dataSize and pData from one proxy), native conversion operators, reflect(),
// defaulted <=> or ==/!= fallback, the four members, and the start of the C++20-guarded CppType
// specialization ("using" dangles; "Type = SpecializationInfo" completes it on the next line).
setDataSize( size_t dataSize_ ) && VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & setPData( const void * pData_ ) & VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } VULKAN_HPP_CONSTEXPR_14 SpecializationInfo && setPData( const void * pData_ ) && VULKAN_HPP_NOEXCEPT { pData = pData_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template SpecializationInfo & setData( ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof( T ); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSpecializationInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSpecializationInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSpecializationInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( mapEntryCount, pMapEntries, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SpecializationInfo const & ) const = default; #else bool operator==( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( mapEntryCount == rhs.mapEntryCount ) && ( pMapEntries == rhs.pMapEntries ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } bool operator!=( SpecializationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t mapEntryCount = {}; const SpecializationMapEntry * pMapEntries = {}; size_t dataSize = {}; const void * pData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )", "std::tuple reflect()", "template <> struct CppType { ... }");
// code kept byte-identical — regenerate from the Vulkan-Hpp generator. TODO confirm.
// Completion of the SpecializationInfo CppType specialization, then the head of
// PipelineShaderStageCreateInfo: sType/pNext chain plus flags, stage (defaults to
// ShaderStageFlagBits::eVertex), module, pName, pSpecializationInfo; constexpr member-wise ctor,
// defaulted copy ops, native-struct conversion ctor/assignment, and the setPNext pair.
Type = SpecializationInfo; }; #endif // wrapper struct for struct VkPipelineShaderStageCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineShaderStageCreateInfo.html struct PipelineShaderStageCreateInfo { using NativeType = VkPipelineShaderStageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateFlags flags_ = {}, ShaderStageFlagBits stage_ = ShaderStageFlagBits::eVertex, ShaderModule module_ = {}, const char * pName_ = {}, const SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , stage{ stage_ } , module{ module_ } , pName{ pName_ } , pSpecializationInfo{ pSpecializationInfo_ } { } VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineShaderStageCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
// PipelineShaderStageCreateInfo setters: setFlags / setStage / setModule / setPName /
// setPSpecializationInfo, each as an lvalue/rvalue ref-qualified pair.
VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setFlags( PipelineShaderStageCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setFlags( PipelineShaderStageCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setStage( ShaderStageFlagBits stage_ ) & VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setStage( ShaderStageFlagBits stage_ ) && VULKAN_HPP_NOEXCEPT { stage = stage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setModule( ShaderModule module_ ) & VULKAN_HPP_NOEXCEPT { module = module_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setModule( ShaderModule module_ ) && VULKAN_HPP_NOEXCEPT { module = module_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPName( const char * pName_ ) & VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setPName( const char * pName_ ) && VULKAN_HPP_NOEXCEPT { pName = pName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) & VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo && setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) && VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineShaderStageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
// PipelineShaderStageCreateInfo tail — unlike most structs here it hand-writes the comparisons:
// operator<=> compares members in declaration order and, when the pName pointers differ, falls back
// to strcmp on the C strings; operator== likewise treats equal-pointer OR strcmp()==0 pName values
// as equal. NOTE(review): "if ( pName != rhs.pName ) if ( auto cmp = strcmp( pName, rhs.pName ); ..."
// dereferences pName without a null check — presumably the generator guarantees non-null here;
// verify against the upstream generated header before flagging upstream.
VkPipelineShaderStageCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineShaderStageCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, stage, module, pName, pSpecializationInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = stage <=> rhs.stage; cmp != 0 ) return cmp; if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp; if ( pName != rhs.pName ) if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ); } bool operator!=( PipelineShaderStageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::ePipelineShaderStageCreateInfo; const void * pNext = {}; PipelineShaderStageCreateFlags flags = {}; ShaderStageFlagBits stage = ShaderStageFlagBits::eVertex; ShaderModule module = {}; const char * pName = {}; const SpecializationInfo * pSpecializationInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
// NOTE(review): template-argument lists appear stripped by extraction throughout this region
// (e.g. "reinterpret_cast( &rhs )", "std::tuple reflect()", "template <> struct CppType { ... }");
// code kept byte-identical — regenerate from the Vulkan-Hpp generator. TODO confirm.
// Completion of the PipelineShaderStageCreateInfo CppType specializations, then the head of
// ComputePipelineCreateInfo: sType/pNext chain plus flags, an embedded
// PipelineShaderStageCreateInfo stage (by value), layout, basePipelineHandle, basePipelineIndex;
// constexpr member-wise ctor, defaulted copy ops, native-struct conversion ctor/assignment,
// and the setPNext pair.
PipelineShaderStageCreateInfo; }; #endif template <> struct CppType { using Type = PipelineShaderStageCreateInfo; }; // wrapper struct for struct VkComputePipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineCreateInfo.html struct ComputePipelineCreateInfo { using NativeType = VkComputePipelineCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( PipelineCreateFlags flags_ = {}, PipelineShaderStageCreateInfo stage_ = {}, PipelineLayout layout_ = {}, Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , stage{ stage_ } , layout{ layout_ } , basePipelineHandle{ basePipelineHandle_ } , basePipelineIndex{ basePipelineIndex_ } { } VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ComputePipelineCreateInfo( *reinterpret_cast( &rhs ) ) { } ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
// ComputePipelineCreateInfo setters: setFlags / setStage (takes PipelineShaderStageCreateInfo
// const &) / setLayout / setBasePipelineHandle / setBasePipelineIndex, each as an lvalue/rvalue
// ref-qualified pair.
VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setFlags( PipelineCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setFlags( PipelineCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setStage( PipelineShaderStageCreateInfo const & stage_ ) & VULKAN_HPP_NOEXCEPT { stage = stage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setStage( PipelineShaderStageCreateInfo const & stage_ ) && VULKAN_HPP_NOEXCEPT { stage = stage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineHandle( Pipeline basePipelineHandle_ ) & VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setBasePipelineHandle( Pipeline basePipelineHandle_ ) && VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) & VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo && setBasePipelineIndex( int32_t basePipelineIndex_ ) && VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkComputePipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
// ComputePipelineCreateInfo tail: pointer conversion operators, reflect(), defaulted <=> or ==/!=
// fallback, members, and CppType specializations; the line then opens the wrapper for
// VkComputePipelineIndirectBufferInfoNV, whose definition continues beyond this chunk (its trailing
// "&&" continues a #if condition on the next line).
VkComputePipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkComputePipelineCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, stage, layout, basePipelineHandle, basePipelineIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ComputePipelineCreateInfo const & ) const = default; #else bool operator==( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stage == rhs.stage ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); # endif } bool operator!=( ComputePipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eComputePipelineCreateInfo; const void * pNext = {}; PipelineCreateFlags flags = {}; PipelineShaderStageCreateInfo stage = {}; PipelineLayout layout = {}; Pipeline basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ComputePipelineCreateInfo; }; #endif template <> struct CppType { using Type = ComputePipelineCreateInfo; }; // wrapper struct for struct VkComputePipelineIndirectBufferInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkComputePipelineIndirectBufferInfoNV.html struct ComputePipelineIndirectBufferInfoNV { using NativeType = VkComputePipelineIndirectBufferInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineIndirectBufferInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( DeviceAddress deviceAddress_ = {}, DeviceSize size_ = {}, DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceAddress{ deviceAddress_ } , size{ size_ } , pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ } { } VULKAN_HPP_CONSTEXPR ComputePipelineIndirectBufferInfoNV( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ComputePipelineIndirectBufferInfoNV( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ComputePipelineIndirectBufferInfoNV( *reinterpret_cast( &rhs ) ) { } ComputePipelineIndirectBufferInfoNV & operator=( ComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ComputePipelineIndirectBufferInfoNV & operator=( VkComputePipelineIndirectBufferInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; 
} VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV & setPipelineDeviceAddressCaptureReplay( DeviceAddress pipelineDeviceAddressCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 ComputePipelineIndirectBufferInfoNV && setPipelineDeviceAddressCaptureReplay( DeviceAddress pipelineDeviceAddressCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { pipelineDeviceAddressCaptureReplay = pipelineDeviceAddressCaptureReplay_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkComputePipelineIndirectBufferInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComputePipelineIndirectBufferInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkComputePipelineIndirectBufferInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkComputePipelineIndirectBufferInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceAddress, size, pipelineDeviceAddressCaptureReplay ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ComputePipelineIndirectBufferInfoNV const & ) const = default; #else bool operator==( ComputePipelineIndirectBufferInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ) && ( pipelineDeviceAddressCaptureReplay == rhs.pipelineDeviceAddressCaptureReplay ); # endif } bool operator!=( ComputePipelineIndirectBufferInfoNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eComputePipelineIndirectBufferInfoNV; const void * pNext = {}; DeviceAddress deviceAddress = {}; DeviceSize size = {}; DeviceAddress pipelineDeviceAddressCaptureReplay = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ComputePipelineIndirectBufferInfoNV; }; #endif template <> struct CppType { using Type = ComputePipelineIndirectBufferInfoNV; }; // wrapper struct for struct VkConditionalRenderingBeginInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConditionalRenderingBeginInfoEXT.html struct ConditionalRenderingBeginInfoEXT { using NativeType = VkConditionalRenderingBeginInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( Buffer buffer_ = {}, DeviceSize offset_ = {}, ConditionalRenderingFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } , offset{ offset_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ConditionalRenderingBeginInfoEXT( *reinterpret_cast( &rhs ) ) { } ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & setFlags( ConditionalRenderingFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT && setFlags( ConditionalRenderingFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkConditionalRenderingBeginInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConditionalRenderingBeginInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkConditionalRenderingBeginInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer, offset, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ConditionalRenderingBeginInfoEXT 
const & ) const = default; #else bool operator==( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( flags == rhs.flags ); # endif } bool operator!=( ConditionalRenderingBeginInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT; const void * pNext = {}; Buffer buffer = {}; DeviceSize offset = {}; ConditionalRenderingFlagsEXT flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ConditionalRenderingBeginInfoEXT; }; #endif template <> struct CppType { using Type = ConditionalRenderingBeginInfoEXT; }; // wrapper struct for struct VkConformanceVersion, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkConformanceVersion.html struct ConformanceVersion { using NativeType = VkConformanceVersion; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT : major{ major_ } , minor{ minor_ } , subminor{ subminor_ } , patch{ patch_ } { } VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT : ConformanceVersion( *reinterpret_cast( &rhs ) ) {} ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMajor( uint8_t major_ ) & VULKAN_HPP_NOEXCEPT { major = major_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion && setMajor( uint8_t major_ ) && VULKAN_HPP_NOEXCEPT { major = major_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setMinor( uint8_t minor_ ) & VULKAN_HPP_NOEXCEPT { minor = minor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion && setMinor( uint8_t minor_ ) && VULKAN_HPP_NOEXCEPT { minor = minor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setSubminor( uint8_t subminor_ ) & VULKAN_HPP_NOEXCEPT { subminor = subminor_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion && setSubminor( uint8_t subminor_ ) && VULKAN_HPP_NOEXCEPT { subminor = subminor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & setPatch( uint8_t patch_ ) & VULKAN_HPP_NOEXCEPT { patch = patch_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConformanceVersion && setPatch( uint8_t patch_ ) && VULKAN_HPP_NOEXCEPT { patch = patch_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkConformanceVersion const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConformanceVersion const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkConformanceVersion *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( major, minor, subminor, patch ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ConformanceVersion const & ) const = default; #else bool operator==( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( major == rhs.major ) && ( 
minor == rhs.minor ) && ( subminor == rhs.subminor ) && ( patch == rhs.patch ); # endif } bool operator!=( ConformanceVersion const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint8_t major = {}; uint8_t minor = {}; uint8_t subminor = {}; uint8_t patch = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ConformanceVersion; }; #endif using ConformanceVersionKHR = ConformanceVersion; // wrapper struct for struct VkConvertCooperativeVectorMatrixInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkConvertCooperativeVectorMatrixInfoNV.html struct ConvertCooperativeVectorMatrixInfoNV { using NativeType = VkConvertCooperativeVectorMatrixInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConvertCooperativeVectorMatrixInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( size_t srcSize_ = {}, DeviceOrHostAddressConstKHR srcData_ = {}, size_t * pDstSize_ = {}, DeviceOrHostAddressKHR dstData_ = {}, ComponentTypeKHR srcComponentType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR dstComponentType_ = ComponentTypeKHR::eFloat16, uint32_t numRows_ = {}, uint32_t numColumns_ = {}, CooperativeVectorMatrixLayoutNV srcLayout_ = CooperativeVectorMatrixLayoutNV::eRowMajor, size_t srcStride_ = {}, CooperativeVectorMatrixLayoutNV dstLayout_ = CooperativeVectorMatrixLayoutNV::eRowMajor, size_t dstStride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcSize{ srcSize_ } , srcData{ srcData_ } , pDstSize{ pDstSize_ } , dstData{ dstData_ } , srcComponentType{ srcComponentType_ } , dstComponentType{ dstComponentType_ } , numRows{ numRows_ } , numColumns{ numColumns_ } , srcLayout{ srcLayout_ } , srcStride{ srcStride_ } , dstLayout{ dstLayout_ } , dstStride{ dstStride_ } { } 
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ConvertCooperativeVectorMatrixInfoNV( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ConvertCooperativeVectorMatrixInfoNV( *reinterpret_cast( &rhs ) ) { } ConvertCooperativeVectorMatrixInfoNV & operator=( ConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ConvertCooperativeVectorMatrixInfoNV & operator=( VkConvertCooperativeVectorMatrixInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcSize( size_t srcSize_ ) & VULKAN_HPP_NOEXCEPT { srcSize = srcSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setSrcSize( size_t srcSize_ ) && VULKAN_HPP_NOEXCEPT { srcSize = srcSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcData( DeviceOrHostAddressConstKHR const & srcData_ ) & VULKAN_HPP_NOEXCEPT { srcData = srcData_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setSrcData( DeviceOrHostAddressConstKHR const & srcData_ ) && VULKAN_HPP_NOEXCEPT { srcData = srcData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setPDstSize( size_t * pDstSize_ ) & VULKAN_HPP_NOEXCEPT { pDstSize = pDstSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && 
setPDstSize( size_t * pDstSize_ ) && VULKAN_HPP_NOEXCEPT { pDstSize = pDstSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstData( DeviceOrHostAddressKHR const & dstData_ ) & VULKAN_HPP_NOEXCEPT { dstData = dstData_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setDstData( DeviceOrHostAddressKHR const & dstData_ ) && VULKAN_HPP_NOEXCEPT { dstData = dstData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcComponentType( ComponentTypeKHR srcComponentType_ ) & VULKAN_HPP_NOEXCEPT { srcComponentType = srcComponentType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setSrcComponentType( ComponentTypeKHR srcComponentType_ ) && VULKAN_HPP_NOEXCEPT { srcComponentType = srcComponentType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstComponentType( ComponentTypeKHR dstComponentType_ ) & VULKAN_HPP_NOEXCEPT { dstComponentType = dstComponentType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setDstComponentType( ComponentTypeKHR dstComponentType_ ) && VULKAN_HPP_NOEXCEPT { dstComponentType = dstComponentType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumRows( uint32_t numRows_ ) & VULKAN_HPP_NOEXCEPT { numRows = numRows_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setNumRows( uint32_t numRows_ ) && VULKAN_HPP_NOEXCEPT { numRows = numRows_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setNumColumns( uint32_t numColumns_ ) & VULKAN_HPP_NOEXCEPT { numColumns = numColumns_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setNumColumns( uint32_t numColumns_ ) && VULKAN_HPP_NOEXCEPT { numColumns = numColumns_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcLayout( CooperativeVectorMatrixLayoutNV srcLayout_ ) & VULKAN_HPP_NOEXCEPT { srcLayout = srcLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setSrcLayout( CooperativeVectorMatrixLayoutNV srcLayout_ ) && VULKAN_HPP_NOEXCEPT { srcLayout = srcLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setSrcStride( size_t srcStride_ ) & VULKAN_HPP_NOEXCEPT { srcStride = srcStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setSrcStride( size_t srcStride_ ) && VULKAN_HPP_NOEXCEPT { srcStride = srcStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstLayout( CooperativeVectorMatrixLayoutNV dstLayout_ ) & VULKAN_HPP_NOEXCEPT { dstLayout = dstLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setDstLayout( CooperativeVectorMatrixLayoutNV dstLayout_ ) && VULKAN_HPP_NOEXCEPT { dstLayout = dstLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV & setDstStride( size_t dstStride_ ) & VULKAN_HPP_NOEXCEPT { dstStride = dstStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 ConvertCooperativeVectorMatrixInfoNV && setDstStride( size_t dstStride_ ) && VULKAN_HPP_NOEXCEPT { dstStride = dstStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkConvertCooperativeVectorMatrixInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConvertCooperativeVectorMatrixInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkConvertCooperativeVectorMatrixInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkConvertCooperativeVectorMatrixInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple 
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcSize, srcData, pDstSize, dstData, srcComponentType, dstComponentType, numRows, numColumns, srcLayout, srcStride, dstLayout, dstStride ); } #endif public: StructureType sType = StructureType::eConvertCooperativeVectorMatrixInfoNV; const void * pNext = {}; size_t srcSize = {}; DeviceOrHostAddressConstKHR srcData = {}; size_t * pDstSize = {}; DeviceOrHostAddressKHR dstData = {}; ComponentTypeKHR srcComponentType = ComponentTypeKHR::eFloat16; ComponentTypeKHR dstComponentType = ComponentTypeKHR::eFloat16; uint32_t numRows = {}; uint32_t numColumns = {}; CooperativeVectorMatrixLayoutNV srcLayout = CooperativeVectorMatrixLayoutNV::eRowMajor; size_t srcStride = {}; CooperativeVectorMatrixLayoutNV dstLayout = CooperativeVectorMatrixLayoutNV::eRowMajor; size_t dstStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ConvertCooperativeVectorMatrixInfoNV; }; #endif template <> struct CppType { using Type = ConvertCooperativeVectorMatrixInfoNV; }; // wrapper struct for struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixFlexibleDimensionsPropertiesNV.html struct CooperativeMatrixFlexibleDimensionsPropertiesNV { using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t MGranularity_ = {}, uint32_t NGranularity_ = {}, uint32_t KGranularity_ = {}, ComponentTypeKHR AType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR BType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR CType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR ResultType_ 
= ComponentTypeKHR::eFloat16, Bool32 saturatingAccumulation_ = {}, ScopeKHR scope_ = ScopeKHR::eDevice, uint32_t workgroupInvocations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , MGranularity{ MGranularity_ } , NGranularity{ NGranularity_ } , KGranularity{ KGranularity_ } , AType{ AType_ } , BType{ BType_ } , CType{ CType_ } , ResultType{ ResultType_ } , saturatingAccumulation{ saturatingAccumulation_ } , scope{ scope_ } , workgroupInvocations{ workgroupInvocations_ } { } VULKAN_HPP_CONSTEXPR CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast( &rhs ) ) { } CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, 
scope, workgroupInvocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default; #else bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) && ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) && ( workgroupInvocations == rhs.workgroupInvocations ); # endif } bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; void * pNext = {}; uint32_t MGranularity = {}; uint32_t NGranularity = {}; uint32_t KGranularity = {}; ComponentTypeKHR AType = ComponentTypeKHR::eFloat16; ComponentTypeKHR BType = ComponentTypeKHR::eFloat16; ComponentTypeKHR CType = ComponentTypeKHR::eFloat16; ComponentTypeKHR ResultType = ComponentTypeKHR::eFloat16; Bool32 saturatingAccumulation = {}; ScopeKHR scope = ScopeKHR::eDevice; uint32_t workgroupInvocations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; }; #endif template <> struct CppType { using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; }; // wrapper struct for struct VkCooperativeMatrixPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesKHR.html struct CooperativeMatrixPropertiesKHR { using NativeType = VkCooperativeMatrixPropertiesKHR; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, ComponentTypeKHR AType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR BType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR CType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR ResultType_ = ComponentTypeKHR::eFloat16, Bool32 saturatingAccumulation_ = {}, ScopeKHR scope_ = ScopeKHR::eDevice, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , MSize{ MSize_ } , NSize{ NSize_ } , KSize{ KSize_ } , AType{ AType_ } , BType{ BType_ } , CType{ CType_ } , ResultType{ ResultType_ } , saturatingAccumulation{ saturatingAccumulation_ } , scope{ scope_ } { } VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesKHR( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) { } CooperativeMatrixPropertiesKHR & operator=( CooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CooperativeMatrixPropertiesKHR & operator=( VkCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, ResultType, saturatingAccumulation, scope ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CooperativeMatrixPropertiesKHR const & ) const = default; #else bool operator==( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ); # endif } bool operator!=( CooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCooperativeMatrixPropertiesKHR; void * pNext = {}; uint32_t MSize = {}; uint32_t NSize = {}; uint32_t KSize = {}; ComponentTypeKHR AType = ComponentTypeKHR::eFloat16; ComponentTypeKHR BType = ComponentTypeKHR::eFloat16; ComponentTypeKHR CType = ComponentTypeKHR::eFloat16; ComponentTypeKHR ResultType = ComponentTypeKHR::eFloat16; Bool32 saturatingAccumulation = {}; ScopeKHR scope = ScopeKHR::eDevice; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CooperativeMatrixPropertiesKHR; }; #endif template <> struct CppType { using Type = CooperativeMatrixPropertiesKHR; }; // wrapper struct for struct VkCooperativeMatrixPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeMatrixPropertiesNV.html struct CooperativeMatrixPropertiesNV { using NativeType = VkCooperativeMatrixPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eCooperativeMatrixPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, ComponentTypeNV AType_ = {}, ComponentTypeNV BType_ = {}, ComponentTypeNV CType_ = {}, ComponentTypeNV DType_ = {}, ScopeNV scope_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , MSize{ MSize_ } , NSize{ NSize_ } , KSize{ KSize_ } , AType{ AType_ } , BType{ BType_ } , CType{ CType_ } , DType{ DType_ } , scope{ scope_ } { } VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : CooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) { } CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, MSize, NSize, KSize, AType, BType, CType, DType, scope ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CooperativeMatrixPropertiesNV const & ) const = default; #else bool operator==( 
CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MSize == rhs.MSize ) && ( NSize == rhs.NSize ) && ( KSize == rhs.KSize ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && ( DType == rhs.DType ) && ( scope == rhs.scope ); # endif } bool operator!=( CooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCooperativeMatrixPropertiesNV; void * pNext = {}; uint32_t MSize = {}; uint32_t NSize = {}; uint32_t KSize = {}; ComponentTypeNV AType = {}; ComponentTypeNV BType = {}; ComponentTypeNV CType = {}; ComponentTypeNV DType = {}; ScopeNV scope = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CooperativeMatrixPropertiesNV; }; #endif template <> struct CppType { using Type = CooperativeMatrixPropertiesNV; }; // wrapper struct for struct VkCooperativeVectorPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCooperativeVectorPropertiesNV.html struct CooperativeVectorPropertiesNV { using NativeType = VkCooperativeVectorPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeVectorPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( ComponentTypeKHR inputType_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR inputInterpretation_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR matrixInterpretation_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR biasInterpretation_ = ComponentTypeKHR::eFloat16, ComponentTypeKHR resultType_ = ComponentTypeKHR::eFloat16, Bool32 transpose_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ 
pNext_ } , inputType{ inputType_ } , inputInterpretation{ inputInterpretation_ } , matrixInterpretation{ matrixInterpretation_ } , biasInterpretation{ biasInterpretation_ } , resultType{ resultType_ } , transpose{ transpose_ } { } VULKAN_HPP_CONSTEXPR CooperativeVectorPropertiesNV( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CooperativeVectorPropertiesNV( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : CooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) { } CooperativeVectorPropertiesNV & operator=( CooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CooperativeVectorPropertiesNV & operator=( VkCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputType( ComponentTypeKHR inputType_ ) & VULKAN_HPP_NOEXCEPT { inputType = inputType_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setInputType( ComponentTypeKHR inputType_ ) && VULKAN_HPP_NOEXCEPT { inputType = inputType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setInputInterpretation( ComponentTypeKHR inputInterpretation_ ) & VULKAN_HPP_NOEXCEPT { inputInterpretation = inputInterpretation_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setInputInterpretation( ComponentTypeKHR inputInterpretation_ ) && VULKAN_HPP_NOEXCEPT { inputInterpretation = inputInterpretation_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setMatrixInterpretation( ComponentTypeKHR matrixInterpretation_ ) & VULKAN_HPP_NOEXCEPT { matrixInterpretation = matrixInterpretation_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setMatrixInterpretation( ComponentTypeKHR matrixInterpretation_ ) && VULKAN_HPP_NOEXCEPT { matrixInterpretation = matrixInterpretation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setBiasInterpretation( ComponentTypeKHR biasInterpretation_ ) & VULKAN_HPP_NOEXCEPT { biasInterpretation = biasInterpretation_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setBiasInterpretation( ComponentTypeKHR biasInterpretation_ ) && VULKAN_HPP_NOEXCEPT { biasInterpretation = biasInterpretation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setResultType( ComponentTypeKHR resultType_ ) & VULKAN_HPP_NOEXCEPT { resultType = resultType_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setResultType( ComponentTypeKHR resultType_ ) && VULKAN_HPP_NOEXCEPT { resultType = resultType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV & setTranspose( Bool32 transpose_ ) & VULKAN_HPP_NOEXCEPT { transpose = transpose_; return *this; } VULKAN_HPP_CONSTEXPR_14 CooperativeVectorPropertiesNV && setTranspose( Bool32 transpose_ ) && VULKAN_HPP_NOEXCEPT { transpose = transpose_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, inputType, inputInterpretation, matrixInterpretation, biasInterpretation, resultType, transpose ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CooperativeVectorPropertiesNV const & ) const = default; #else bool operator==( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inputType == rhs.inputType ) && ( inputInterpretation == rhs.inputInterpretation ) && ( matrixInterpretation == rhs.matrixInterpretation ) && ( biasInterpretation == rhs.biasInterpretation ) && ( resultType == rhs.resultType ) && ( transpose == rhs.transpose ); # endif } bool operator!=( CooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCooperativeVectorPropertiesNV; void * pNext = {}; ComponentTypeKHR inputType = ComponentTypeKHR::eFloat16; ComponentTypeKHR inputInterpretation = ComponentTypeKHR::eFloat16; ComponentTypeKHR matrixInterpretation = ComponentTypeKHR::eFloat16; ComponentTypeKHR biasInterpretation = ComponentTypeKHR::eFloat16; ComponentTypeKHR resultType = ComponentTypeKHR::eFloat16; Bool32 transpose = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CooperativeVectorPropertiesNV; }; #endif template <> struct CppType { using Type = CooperativeVectorPropertiesNV; }; // wrapper struct for struct VkCopyAccelerationStructureInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureInfoKHR.html struct CopyAccelerationStructureInfoKHR { using NativeType = VkCopyAccelerationStructureInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eCopyAccelerationStructureInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( AccelerationStructureKHR src_ = {}, AccelerationStructureKHR dst_ = {}, CopyAccelerationStructureModeKHR mode_ = CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) { } CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setSrc( AccelerationStructureKHR src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR && setSrc( AccelerationStructureKHR src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setDst( AccelerationStructureKHR dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 
CopyAccelerationStructureInfoKHR && setDst( AccelerationStructureKHR dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR && setMode( CopyAccelerationStructureModeKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyAccelerationStructureInfoKHR const & ) const = default; #else bool operator==( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); # endif } bool operator!=( CopyAccelerationStructureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR; const void * pNext = {}; AccelerationStructureKHR src = {}; AccelerationStructureKHR dst = {}; CopyAccelerationStructureModeKHR mode = CopyAccelerationStructureModeKHR::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct 
CppType { using Type = CopyAccelerationStructureInfoKHR; }; #endif template <> struct CppType { using Type = CopyAccelerationStructureInfoKHR; }; // wrapper struct for struct VkCopyAccelerationStructureToMemoryInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyAccelerationStructureToMemoryInfoKHR.html struct CopyAccelerationStructureToMemoryInfoKHR { using NativeType = VkCopyAccelerationStructureToMemoryInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( AccelerationStructureKHR src_ = {}, DeviceOrHostAddressKHR dst_ = {}, CopyAccelerationStructureModeKHR mode_ = CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast( &rhs ) ) { } CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setSrc( AccelerationStructureKHR src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR && setSrc( AccelerationStructureKHR src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setDst( DeviceOrHostAddressKHR const & dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR && setDst( DeviceOrHostAddressKHR const & dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureToMemoryInfoKHR && setMode( CopyAccelerationStructureModeKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyAccelerationStructureToMemoryInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyAccelerationStructureToMemoryInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyAccelerationStructureToMemoryInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif public: StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR; const void * pNext = {}; AccelerationStructureKHR 
src = {}; DeviceOrHostAddressKHR dst = {}; CopyAccelerationStructureModeKHR mode = CopyAccelerationStructureModeKHR::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyAccelerationStructureToMemoryInfoKHR; }; #endif template <> struct CppType { using Type = CopyAccelerationStructureToMemoryInfoKHR; }; // wrapper struct for struct VkCopyBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferInfo2.html struct CopyBufferInfo2 { using NativeType = VkCopyBufferInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyBufferInfo2( Buffer srcBuffer_ = {}, Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const BufferCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcBuffer{ srcBuffer_ } , dstBuffer{ dstBuffer_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyBufferInfo2( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyBufferInfo2( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferInfo2( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyBufferInfo2( Buffer srcBuffer_, Buffer dstBuffer_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ), srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyBufferInfo2 & operator=( CopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyBufferInfo2 & operator=( VkCopyBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && 
!defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setSrcBuffer( Buffer srcBuffer_ ) & VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 && setSrcBuffer( Buffer srcBuffer_ ) && VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setDstBuffer( Buffer dstBuffer_ ) & VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 && setDstBuffer( Buffer dstBuffer_ ) && VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 & setPRegions( const BufferCopy2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2 && setPRegions( const BufferCopy2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyBufferInfo2 & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyBufferInfo2 &() VULKAN_HPP_NOEXCEPT 
{ return *reinterpret_cast( this ); } operator VkCopyBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyBufferInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcBuffer, dstBuffer, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyBufferInfo2 const & ) const = default; #else bool operator==( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyBufferInfo2; const void * pNext = {}; Buffer srcBuffer = {}; Buffer dstBuffer = {}; uint32_t regionCount = {}; const BufferCopy2 * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyBufferInfo2; }; #endif template <> struct CppType { using Type = CopyBufferInfo2; }; using CopyBufferInfo2KHR = CopyBufferInfo2; // wrapper struct for struct VkCopyBufferToImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyBufferToImageInfo2.html struct CopyBufferToImageInfo2 { using NativeType = VkCopyBufferToImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( Buffer srcBuffer_ = {}, Image dstImage_ = {}, ImageLayout dstImageLayout_ = 
ImageLayout::eUndefined, uint32_t regionCount_ = {}, const BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcBuffer{ srcBuffer_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyBufferToImageInfo2( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyBufferToImageInfo2( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyBufferToImageInfo2( Buffer srcBuffer_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , srcBuffer( srcBuffer_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyBufferToImageInfo2 & operator=( CopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyBufferToImageInfo2 & operator=( VkCopyBufferToImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setSrcBuffer( Buffer srcBuffer_ ) & VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setSrcBuffer( Buffer srcBuffer_ ) && VULKAN_HPP_NOEXCEPT { srcBuffer = srcBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
CopyBufferToImageInfo2 & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 & setPRegions( const BufferImageCopy2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2 && setPRegions( const BufferImageCopy2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyBufferToImageInfo2 & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyBufferToImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyBufferToImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyBufferToImageInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyBufferToImageInfo2 *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyBufferToImageInfo2 const & ) const = default; #else bool operator==( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcBuffer == rhs.srcBuffer ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyBufferToImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyBufferToImageInfo2; const void * pNext = {}; Buffer srcBuffer = {}; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const BufferImageCopy2 * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyBufferToImageInfo2; }; #endif template <> struct CppType { using Type = CopyBufferToImageInfo2; }; using CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2; // wrapper struct for struct VkCopyCommandTransformInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyCommandTransformInfoQCOM.html struct CopyCommandTransformInfoQCOM { using NativeType = VkCopyCommandTransformInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( SurfaceTransformFlagBitsKHR transform_ = 
SurfaceTransformFlagBitsKHR::eIdentity, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , transform{ transform_ } { } VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : CopyCommandTransformInfoQCOM( *reinterpret_cast( &rhs ) ) { } CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & setTransform( SurfaceTransformFlagBitsKHR transform_ ) & VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM && setTransform( SurfaceTransformFlagBitsKHR transform_ ) && VULKAN_HPP_NOEXCEPT { transform = transform_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyCommandTransformInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyCommandTransformInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyCommandTransformInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple 
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, transform ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyCommandTransformInfoQCOM const & ) const = default; #else bool operator==( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( transform == rhs.transform ); # endif } bool operator!=( CopyCommandTransformInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyCommandTransformInfoQCOM; const void * pNext = {}; SurfaceTransformFlagBitsKHR transform = SurfaceTransformFlagBitsKHR::eIdentity; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyCommandTransformInfoQCOM; }; #endif template <> struct CppType { using Type = CopyCommandTransformInfoQCOM; }; // wrapper struct for struct VkCopyDescriptorSet, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyDescriptorSet.html struct CopyDescriptorSet { using NativeType = VkCopyDescriptorSet; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyDescriptorSet( DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcSet{ srcSet_ } , srcBinding{ srcBinding_ } , srcArrayElement{ srcArrayElement_ } , dstSet{ dstSet_ } , dstBinding{ dstBinding_ } , dstArrayElement{ dstArrayElement_ } , descriptorCount{ descriptorCount_ } { } VULKAN_HPP_CONSTEXPR 
CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT : CopyDescriptorSet( *reinterpret_cast( &rhs ) ) {} CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcSet( DescriptorSet srcSet_ ) & VULKAN_HPP_NOEXCEPT { srcSet = srcSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setSrcSet( DescriptorSet srcSet_ ) && VULKAN_HPP_NOEXCEPT { srcSet = srcSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) & VULKAN_HPP_NOEXCEPT { srcBinding = srcBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setSrcBinding( uint32_t srcBinding_ ) && VULKAN_HPP_NOEXCEPT { srcBinding = srcBinding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) & VULKAN_HPP_NOEXCEPT { srcArrayElement = srcArrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setSrcArrayElement( uint32_t srcArrayElement_ ) && VULKAN_HPP_NOEXCEPT { srcArrayElement = srcArrayElement_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstSet( DescriptorSet dstSet_ ) & VULKAN_HPP_NOEXCEPT { dstSet = dstSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setDstSet( DescriptorSet 
dstSet_ ) && VULKAN_HPP_NOEXCEPT { dstSet = dstSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) & VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setDstBinding( uint32_t dstBinding_ ) && VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) & VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setDstArrayElement( uint32_t dstArrayElement_ ) && VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet && setDescriptorCount( uint32_t descriptorCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyDescriptorSet const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyDescriptorSet const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyDescriptorSet *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcSet, srcBinding, srcArrayElement, dstSet, dstBinding, dstArrayElement, descriptorCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyDescriptorSet const & ) const = default; #else bool operator==( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSet == rhs.srcSet ) && ( srcBinding == rhs.srcBinding ) && ( srcArrayElement == rhs.srcArrayElement ) && ( dstSet == rhs.dstSet ) && ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ); # endif } bool operator!=( CopyDescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyDescriptorSet; const void * pNext = {}; DescriptorSet srcSet = {}; uint32_t srcBinding = {}; uint32_t srcArrayElement = {}; DescriptorSet dstSet = {}; uint32_t dstBinding = {}; uint32_t dstArrayElement = {}; uint32_t descriptorCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyDescriptorSet; }; #endif template <> struct CppType { using Type = CopyDescriptorSet; }; // wrapper struct for struct VkImageCopy2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy2.html struct ImageCopy2 { using NativeType = VkImageCopy2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCopy2( ImageSubresourceLayers srcSubresource_ = {}, Offset3D srcOffset_ = {}, ImageSubresourceLayers dstSubresource_ = {}, Offset3D dstOffset_ = {}, Extent3D extent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcSubresource{ srcSubresource_ } , srcOffset{ srcOffset_ } , dstSubresource{ dstSubresource_ } , dstOffset{ dstOffset_ } , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR ImageCopy2( ImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCopy2( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy2( *reinterpret_cast( &rhs ) ) {} ImageCopy2 & operator=( ImageCopy2 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCopy2 & operator=( VkImageCopy2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setSrcOffset( Offset3D const & srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setSrcOffset( Offset3D const & srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setDstOffset( Offset3D const & dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setDstOffset( Offset3D const & dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { 
extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy2 && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageCopy2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCopy2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCopy2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCopy2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCopy2 const & ) const = default; #else bool operator==( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } bool operator!=( ImageCopy2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageCopy2; const void * pNext = {}; ImageSubresourceLayers srcSubresource = {}; Offset3D srcOffset = {}; ImageSubresourceLayers dstSubresource = {}; Offset3D dstOffset = {}; Extent3D extent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCopy2; }; #endif template <> struct CppType { using Type = ImageCopy2; }; using ImageCopy2KHR = ImageCopy2; // wrapper struct for struct VkCopyImageInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageInfo2.html struct CopyImageInfo2 { using NativeType = 
VkCopyImageInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyImageInfo2( Image srcImage_ = {}, ImageLayout srcImageLayout_ = ImageLayout::eUndefined, Image dstImage_ = {}, ImageLayout dstImageLayout_ = ImageLayout::eUndefined, uint32_t regionCount_ = {}, const ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcImage{ srcImage_ } , srcImageLayout{ srcImageLayout_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyImageInfo2( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyImageInfo2( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageInfo2( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageInfo2( Image srcImage_, ImageLayout srcImageLayout_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , srcImage( srcImage_ ) , srcImageLayout( srcImageLayout_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyImageInfo2 & operator=( CopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyImageInfo2 & operator=( VkCopyImageInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setPNext( const void * 
pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImage( Image srcImage_ ) & VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setSrcImage( Image srcImage_ ) && VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) & VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setSrcImageLayout( ImageLayout srcImageLayout_ ) && VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 & setPRegions( const ImageCopy2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2 && setPRegions( const ImageCopy2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageInfo2 & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyImageInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyImageInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyImageInfo2 const & ) const = default; #else bool operator==( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyImageInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyImageInfo2; const void * pNext = {}; Image srcImage = {}; ImageLayout srcImageLayout = ImageLayout::eUndefined; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const ImageCopy2 * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyImageInfo2; }; #endif template <> struct CppType { using Type = 
CopyImageInfo2; }; using CopyImageInfo2KHR = CopyImageInfo2; // wrapper struct for struct VkCopyImageToBufferInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToBufferInfo2.html struct CopyImageToBufferInfo2 { using NativeType = VkCopyImageToBufferInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( Image srcImage_ = {}, ImageLayout srcImageLayout_ = ImageLayout::eUndefined, Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcImage{ srcImage_ } , srcImageLayout{ srcImageLayout_ } , dstBuffer{ dstBuffer_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyImageToBufferInfo2( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageToBufferInfo2( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToBufferInfo2( Image srcImage_, ImageLayout srcImageLayout_, Buffer dstBuffer_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , srcImage( srcImage_ ) , srcImageLayout( srcImageLayout_ ) , dstBuffer( dstBuffer_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyImageToBufferInfo2 & operator=( CopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyImageToBufferInfo2 & operator=( VkCopyImageToBufferInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImage( Image srcImage_ ) & VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setSrcImage( Image srcImage_ ) && VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setSrcImageLayout( ImageLayout srcImageLayout_ ) & VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setSrcImageLayout( ImageLayout srcImageLayout_ ) && VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setDstBuffer( Buffer dstBuffer_ ) & VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setDstBuffer( Buffer dstBuffer_ ) && VULKAN_HPP_NOEXCEPT { dstBuffer = dstBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 & setPRegions( const BufferImageCopy2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2 && setPRegions( const BufferImageCopy2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToBufferInfo2 & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyImageToBufferInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToBufferInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToBufferInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyImageToBufferInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyImageToBufferInfo2 const & ) const = default; #else bool operator==( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && ( dstBuffer == rhs.dstBuffer ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyImageToBufferInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyImageToBufferInfo2; const void * pNext = {}; Image srcImage = {}; ImageLayout srcImageLayout = ImageLayout::eUndefined; Buffer dstBuffer = {}; uint32_t regionCount = {}; const BufferImageCopy2 * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyImageToBufferInfo2; }; #endif template <> struct CppType { using Type = CopyImageToBufferInfo2; }; 
using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; // wrapper struct for struct VkCopyImageToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToImageInfo.html struct CopyImageToImageInfo { using NativeType = VkCopyImageToImageInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( HostImageCopyFlags flags_ = {}, Image srcImage_ = {}, ImageLayout srcImageLayout_ = ImageLayout::eUndefined, Image dstImage_ = {}, ImageLayout dstImageLayout_ = ImageLayout::eUndefined, uint32_t regionCount_ = {}, const ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , srcImage{ srcImage_ } , srcImageLayout{ srcImageLayout_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageToImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToImageInfo( HostImageCopyFlags flags_, Image srcImage_, ImageLayout srcImageLayout_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , srcImage( srcImage_ ) , srcImageLayout( srcImageLayout_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( HostImageCopyFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setFlags( HostImageCopyFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImage( Image srcImage_ ) & VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setSrcImage( Image srcImage_ ) && VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( ImageLayout srcImageLayout_ ) & VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setSrcImageLayout( ImageLayout srcImageLayout_ ) && VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo 
&& setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const ImageCopy2 * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo && setPRegions( const ImageCopy2 * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToImageInfo & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToImageInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyImageToImageInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyImageToImageInfo const & ) const = default; #else bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # 
else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyImageToImageInfo; const void * pNext = {}; HostImageCopyFlags flags = {}; Image srcImage = {}; ImageLayout srcImageLayout = ImageLayout::eUndefined; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const ImageCopy2 * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyImageToImageInfo; }; #endif template <> struct CppType { using Type = CopyImageToImageInfo; }; using CopyImageToImageInfoEXT = CopyImageToImageInfo; // wrapper struct for struct VkImageToMemoryCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageToMemoryCopy.html struct ImageToMemoryCopy { using NativeType = VkImageToMemoryCopy; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopy; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( void * pHostPointer_ = {}, uint32_t memoryRowLength_ = {}, uint32_t memoryImageHeight_ = {}, ImageSubresourceLayers imageSubresource_ = {}, Offset3D imageOffset_ = {}, Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pHostPointer{ pHostPointer_ } , memoryRowLength{ memoryRowLength_ } , memoryImageHeight{ memoryImageHeight_ } , imageSubresource{ imageSubresource_ } , imageOffset{ imageOffset_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR 
ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageToMemoryCopy( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageToMemoryCopy( *reinterpret_cast( &rhs ) ) {} ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) & VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setPHostPointer( void * pHostPointer_ ) && VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) & VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setMemoryRowLength( uint32_t memoryRowLength_ ) && VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) & VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setMemoryImageHeight( uint32_t memoryImageHeight_ ) && VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) & 
VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( Offset3D const & imageOffset_ ) & VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setImageOffset( Offset3D const & imageOffset_ ) && VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( Extent3D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy && setImageExtent( Extent3D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageToMemoryCopy const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageToMemoryCopy *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageToMemoryCopy const & ) const = default; #else bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == 
rhs.memoryRowLength ) && ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageToMemoryCopy; const void * pNext = {}; void * pHostPointer = {}; uint32_t memoryRowLength = {}; uint32_t memoryImageHeight = {}; ImageSubresourceLayers imageSubresource = {}; Offset3D imageOffset = {}; Extent3D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageToMemoryCopy; }; #endif template <> struct CppType { using Type = ImageToMemoryCopy; }; using ImageToMemoryCopyEXT = ImageToMemoryCopy; // wrapper struct for struct VkCopyImageToMemoryInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyImageToMemoryInfo.html struct CopyImageToMemoryInfo { using NativeType = VkCopyImageToMemoryInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( HostImageCopyFlags flags_ = {}, Image srcImage_ = {}, ImageLayout srcImageLayout_ = ImageLayout::eUndefined, uint32_t regionCount_ = {}, const ImageToMemoryCopy * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , srcImage{ srcImage_ } , srcImageLayout{ srcImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CopyImageToMemoryInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( 
// -- CopyImageToMemoryInfo (continued from the previous line) -------------------------------
// Enhanced-mode convenience constructor: takes an ArrayProxyNoTemporaries of regions and
// derives regionCount from regions_.size() / pRegions from regions_.data(); then the
// Vk-assignment operator and the fluent setters (setPNext/setFlags/setSrcImage/...).
// NOTE(review): chunk flattened by extraction — '#'-directives and '//' comments are
// mid-line and all '<...>' template-argument lists (ArrayProxyNoTemporaries, static_cast,
// reinterpret_cast) were stripped. Bytes preserved; restore from upstream — TODO confirm.
VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToMemoryInfo( HostImageCopyFlags flags_, Image srcImage_, ImageLayout srcImageLayout_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , srcImage( srcImage_ ) , srcImageLayout( srcImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( HostImageCopyFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setFlags( HostImageCopyFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( Image srcImage_ ) & VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setSrcImage( Image srcImage_ ) && VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( ImageLayout srcImageLayout_ ) & VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setSrcImageLayout( ImageLayout srcImageLayout_ ) && VULKAN_HPP_NOEXCEPT { srcImageLayout = 
// Remaining setters (regionCount / pRegions), the enhanced-mode setRegions() that keeps
// regionCount and pRegions in sync from one ArrayProxy, the Vk conversion operators,
// reflect(), and the start of the comparison operators.
srcImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( const ImageToMemoryCopy * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo && setPRegions( const ImageToMemoryCopy * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyImageToMemoryInfo & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyImageToMemoryInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyImageToMemoryInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, srcImage, srcImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyImageToMemoryInfo const & ) const = default; #else bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( 
// Tail of CopyImageToMemoryInfo: memberwise comparison, data members (sType =
// eCopyImageToMemoryInfo, flags, srcImage + layout, regionCount/pRegions), CppType
// specializations, and the alias CopyImageToMemoryInfoEXT. Then the plain data struct
// CopyMemoryIndirectCommandKHR (srcAddress/dstAddress/size, no sType/pNext) begins.
srcImage == rhs.srcImage ) && ( srcImageLayout == rhs.srcImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyImageToMemoryInfo; const void * pNext = {}; HostImageCopyFlags flags = {}; Image srcImage = {}; ImageLayout srcImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const ImageToMemoryCopy * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyImageToMemoryInfo; }; #endif template <> struct CppType { using Type = CopyImageToMemoryInfo; }; using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; // wrapper struct for struct VkCopyMemoryIndirectCommandKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectCommandKHR.html struct CopyMemoryIndirectCommandKHR { using NativeType = VkCopyMemoryIndirectCommandKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandKHR( DeviceAddress srcAddress_ = {}, DeviceAddress dstAddress_ = {}, DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT : srcAddress{ srcAddress_ } , dstAddress{ dstAddress_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandKHR( CopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryIndirectCommandKHR( VkCopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryIndirectCommandKHR( *reinterpret_cast( &rhs ) ) { } CopyMemoryIndirectCommandKHR & operator=( CopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryIndirectCommandKHR & operator=( VkCopyMemoryIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
// -- CopyMemoryIndirectCommandKHR (continued) -----------------------------------------------
// Fluent setters for srcAddress / dstAddress / size, Vk conversion operators, reflect(),
// and the start of the comparison operators of this plain (no sType/pNext) command struct.
// NOTE(review): chunk flattened by extraction — directives/comments are mid-line and the
// '<...>' template-argument lists (reinterpret_cast, std::tuple, CppType) were stripped.
// Bytes preserved below; restore from upstream Vulkan-Hpp — TODO confirm.
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setSrcAddress( DeviceAddress srcAddress_ ) & VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR && setSrcAddress( DeviceAddress srcAddress_ ) && VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setDstAddress( DeviceAddress dstAddress_ ) & VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR && setDstAddress( DeviceAddress dstAddress_ ) && VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectCommandKHR && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcAddress, dstAddress, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyMemoryIndirectCommandKHR const & ) const = default; #else bool operator==( CopyMemoryIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( 
// Tail of CopyMemoryIndirectCommandKHR: comparison, members (srcAddress, dstAddress, size),
// C++20-guarded CppType specialization and the alias CopyMemoryIndirectCommandNV.
// Then StridedDeviceAddressRangeKHR (address/size/stride triple) begins: constructors,
// Vk-assignment and the first setter pair (setAddress).
size == rhs.size ); # endif } bool operator!=( CopyMemoryIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress srcAddress = {}; DeviceAddress dstAddress = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryIndirectCommandKHR; }; #endif using CopyMemoryIndirectCommandNV = CopyMemoryIndirectCommandKHR; // wrapper struct for struct VkStridedDeviceAddressRangeKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStridedDeviceAddressRangeKHR.html struct StridedDeviceAddressRangeKHR { using NativeType = VkStridedDeviceAddressRangeKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR StridedDeviceAddressRangeKHR( DeviceAddress address_ = {}, DeviceSize size_ = {}, DeviceSize stride_ = {} ) VULKAN_HPP_NOEXCEPT : address{ address_ } , size{ size_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR StridedDeviceAddressRangeKHR( StridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; StridedDeviceAddressRangeKHR( VkStridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT : StridedDeviceAddressRangeKHR( *reinterpret_cast( &rhs ) ) { } StridedDeviceAddressRangeKHR & operator=( StridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ StridedDeviceAddressRangeKHR & operator=( VkStridedDeviceAddressRangeKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setAddress( DeviceAddress address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR && setAddress( DeviceAddress address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } 
// -- StridedDeviceAddressRangeKHR (continued) -----------------------------------------------
// Remaining setters (setSize / setStride), Vk conversion operators, reflect(), comparison
// operators, the data members (address, size, stride) and the C++20-guarded CppType
// specialization. The trailing '// wrapper struct ...' comment introduces
// VkCopyMemoryIndirectInfoKHR, whose URL/definition continues on the next line.
// NOTE(review): flattened by extraction — '<...>' template-argument lists stripped and
// directives/comments mid-line; bytes preserved, restore from upstream — TODO confirm.
VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR & setStride( DeviceSize stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRangeKHR && setStride( DeviceSize stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkStridedDeviceAddressRangeKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressRangeKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStridedDeviceAddressRangeKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkStridedDeviceAddressRangeKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( address, size, stride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( StridedDeviceAddressRangeKHR const & ) const = default; #else bool operator==( StridedDeviceAddressRangeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( address == rhs.address ) && ( size == rhs.size ) && ( stride == rhs.stride ); # endif } bool operator!=( StridedDeviceAddressRangeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress address = {}; DeviceSize size = {}; DeviceSize stride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = StridedDeviceAddressRangeKHR; }; #endif // wrapper struct for struct VkCopyMemoryIndirectInfoKHR, see 
// -- CopyMemoryIndirectInfoKHR --------------------------------------------------------------
// sType-based wrapper for VkCopyMemoryIndirectInfoKHR: srcCopyFlags/dstCopyFlags, copyCount,
// and a StridedDeviceAddressRangeKHR copyAddressRange; constructors + Vk-assignment + setters.
// NOTE(review): this line begins with the second half of a URL whose '//'-comment prefix
// was left on the previous line when newlines were lost; the whole chunk is flattened and
// all '<...>' template-argument lists are stripped. Bytes preserved — TODO restore upstream.
https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryIndirectInfoKHR.html struct CopyMemoryIndirectInfoKHR { using NativeType = VkCopyMemoryIndirectInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryIndirectInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMemoryIndirectInfoKHR( AddressCopyFlagsKHR srcCopyFlags_ = {}, AddressCopyFlagsKHR dstCopyFlags_ = {}, uint32_t copyCount_ = {}, StridedDeviceAddressRangeKHR copyAddressRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcCopyFlags{ srcCopyFlags_ } , dstCopyFlags{ dstCopyFlags_ } , copyCount{ copyCount_ } , copyAddressRange{ copyAddressRange_ } { } VULKAN_HPP_CONSTEXPR CopyMemoryIndirectInfoKHR( CopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryIndirectInfoKHR( VkCopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryIndirectInfoKHR( *reinterpret_cast( &rhs ) ) { } CopyMemoryIndirectInfoKHR & operator=( CopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryIndirectInfoKHR & operator=( VkCopyMemoryIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) & VULKAN_HPP_NOEXCEPT { srcCopyFlags = srcCopyFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 
// Remaining setters (dstCopyFlags, copyCount, copyAddressRange), Vk conversion operators,
// reflect(), and the start of the spaceship/comparison operator section.
CopyMemoryIndirectInfoKHR && setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) && VULKAN_HPP_NOEXCEPT { srcCopyFlags = srcCopyFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setDstCopyFlags( AddressCopyFlagsKHR dstCopyFlags_ ) & VULKAN_HPP_NOEXCEPT { dstCopyFlags = dstCopyFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR && setDstCopyFlags( AddressCopyFlagsKHR dstCopyFlags_ ) && VULKAN_HPP_NOEXCEPT { dstCopyFlags = dstCopyFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setCopyCount( uint32_t copyCount_ ) & VULKAN_HPP_NOEXCEPT { copyCount = copyCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR && setCopyCount( uint32_t copyCount_ ) && VULKAN_HPP_NOEXCEPT { copyCount = copyCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR & setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) & VULKAN_HPP_NOEXCEPT { copyAddressRange = copyAddressRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryIndirectInfoKHR && setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) && VULKAN_HPP_NOEXCEPT { copyAddressRange = copyAddressRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryIndirectInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryIndirectInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryIndirectInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryIndirectInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcCopyFlags, dstCopyFlags, copyCount, copyAddressRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
// Tail of CopyMemoryIndirectInfoKHR: comparison operators, data members and CppType
// specializations. Then CopyMemoryToAccelerationStructureInfoKHR begins — note its
// constructor is VULKAN_HPP_CONSTEXPR_14 (src is a DeviceOrHostAddressConstKHR).
CopyMemoryIndirectInfoKHR const & ) const = default; #else bool operator==( CopyMemoryIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcCopyFlags == rhs.srcCopyFlags ) && ( dstCopyFlags == rhs.dstCopyFlags ) && ( copyCount == rhs.copyCount ) && ( copyAddressRange == rhs.copyAddressRange ); # endif } bool operator!=( CopyMemoryIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyMemoryIndirectInfoKHR; const void * pNext = {}; AddressCopyFlagsKHR srcCopyFlags = {}; AddressCopyFlagsKHR dstCopyFlags = {}; uint32_t copyCount = {}; StridedDeviceAddressRangeKHR copyAddressRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryIndirectInfoKHR; }; #endif template <> struct CppType { using Type = CopyMemoryIndirectInfoKHR; }; // wrapper struct for struct VkCopyMemoryToAccelerationStructureInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToAccelerationStructureInfoKHR.html struct CopyMemoryToAccelerationStructureInfoKHR { using NativeType = VkCopyMemoryToAccelerationStructureInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( DeviceOrHostAddressConstKHR src_ = {}, AccelerationStructureKHR dst_ = {}, CopyAccelerationStructureModeKHR mode_ = CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR( 
// -- CopyMemoryToAccelerationStructureInfoKHR (continued) -----------------------------------
// Copy/Vk constructors, assignment operators and fluent setters for src (device-or-host
// address), dst (acceleration structure) and mode.
// NOTE(review): chunk flattened by extraction — directives/comments mid-line, '<...>'
// template-argument lists stripped. Bytes preserved; restore from upstream — TODO confirm.
CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast( &rhs ) ) { } CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setSrc( DeviceOrHostAddressConstKHR const & src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR && setSrc( DeviceOrHostAddressConstKHR const & src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setDst( AccelerationStructureKHR dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR && setDst( AccelerationStructureKHR dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR & setMode( CopyAccelerationStructureModeKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToAccelerationStructureInfoKHR && setMode( 
// Tail: last setter, Vk conversion operators, reflect(), members and CppType. In the visible
// text this struct has no operator==/!= — presumably because src is a union
// (DeviceOrHostAddressConstKHR) that cannot be compared memberwise; confirm upstream.
// Afterwards CopyMemoryToImageIndirectCommandKHR begins (plain struct: srcAddress,
// bufferRowLength/bufferImageHeight pitch, imageSubresource/imageOffset/imageExtent).
CopyAccelerationStructureModeKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryToAccelerationStructureInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToAccelerationStructureInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryToAccelerationStructureInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif public: StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR; const void * pNext = {}; DeviceOrHostAddressConstKHR src = {}; AccelerationStructureKHR dst = {}; CopyAccelerationStructureModeKHR mode = CopyAccelerationStructureModeKHR::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryToAccelerationStructureInfoKHR; }; #endif template <> struct CppType { using Type = CopyMemoryToAccelerationStructureInfoKHR; }; // wrapper struct for struct VkCopyMemoryToImageIndirectCommandKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectCommandKHR.html struct CopyMemoryToImageIndirectCommandKHR { using NativeType = VkCopyMemoryToImageIndirectCommandKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandKHR( DeviceAddress srcAddress_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, ImageSubresourceLayers imageSubresource_ = {}, Offset3D imageOffset_ = {}, Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT : srcAddress{ srcAddress_ } , bufferRowLength{ bufferRowLength_ } , bufferImageHeight{ 
// -- CopyMemoryToImageIndirectCommandKHR (continued) ----------------------------------------
// Rest of the member-init list, copy/Vk constructors, assignment operators and the first
// fluent setters (srcAddress, bufferRowLength, bufferImageHeight).
// NOTE(review): chunk flattened by extraction — directives/comments mid-line, '<...>'
// template-argument lists stripped. Bytes preserved; restore from upstream — TODO confirm.
bufferImageHeight_ } , imageSubresource{ imageSubresource_ } , imageOffset{ imageOffset_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectCommandKHR( CopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToImageIndirectCommandKHR( VkCopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryToImageIndirectCommandKHR( *reinterpret_cast( &rhs ) ) { } CopyMemoryToImageIndirectCommandKHR & operator=( CopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryToImageIndirectCommandKHR & operator=( VkCopyMemoryToImageIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setSrcAddress( DeviceAddress srcAddress_ ) & VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setSrcAddress( DeviceAddress srcAddress_ ) && VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setBufferRowLength( uint32_t bufferRowLength_ ) & VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setBufferRowLength( uint32_t bufferRowLength_ ) && VULKAN_HPP_NOEXCEPT { bufferRowLength = bufferRowLength_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) & VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setBufferImageHeight( uint32_t bufferImageHeight_ ) && VULKAN_HPP_NOEXCEPT { bufferImageHeight = bufferImageHeight_; return 
// Remaining setters (imageSubresource / imageOffset / imageExtent), Vk conversion operators,
// reflect(), and the start of the comparison-operator section.
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) & VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageOffset( Offset3D const & imageOffset_ ) & VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setImageOffset( Offset3D const & imageOffset_ ) && VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR & setImageExtent( Extent3D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectCommandKHR && setImageExtent( Extent3D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryToImageIndirectCommandKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectCommandKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectCommandKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcAddress, bufferRowLength, bufferImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
// Tail of CopyMemoryToImageIndirectCommandKHR: comparison, members, CppType specialization
// and the alias CopyMemoryToImageIndirectCommandNV. Then CopyMemoryToImageIndirectInfoKHR
// begins (sType-based; srcCopyFlags, copyCount, copyAddressRange, dstImage + layout,
// pImageSubresources).
CopyMemoryToImageIndirectCommandKHR const & ) const = default; #else bool operator==( CopyMemoryToImageIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcAddress == rhs.srcAddress ) && ( bufferRowLength == rhs.bufferRowLength ) && ( bufferImageHeight == rhs.bufferImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( CopyMemoryToImageIndirectCommandKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress srcAddress = {}; uint32_t bufferRowLength = {}; uint32_t bufferImageHeight = {}; ImageSubresourceLayers imageSubresource = {}; Offset3D imageOffset = {}; Extent3D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryToImageIndirectCommandKHR; }; #endif using CopyMemoryToImageIndirectCommandNV = CopyMemoryToImageIndirectCommandKHR; // wrapper struct for struct VkCopyMemoryToImageIndirectInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageIndirectInfoKHR.html struct CopyMemoryToImageIndirectInfoKHR { using NativeType = VkCopyMemoryToImageIndirectInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageIndirectInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectInfoKHR( AddressCopyFlagsKHR srcCopyFlags_ = {}, uint32_t copyCount_ = {}, StridedDeviceAddressRangeKHR copyAddressRange_ = {}, Image dstImage_ = {}, ImageLayout dstImageLayout_ = ImageLayout::eUndefined, const ImageSubresourceLayers * pImageSubresources_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcCopyFlags{ srcCopyFlags_ } , 
// -- CopyMemoryToImageIndirectInfoKHR (continued) -------------------------------------------
// Rest of the member-init list, copy/Vk constructors and the enhanced-mode convenience
// constructor that derives copyCount from imageSubresources_.size() and pImageSubresources
// from imageSubresources_.data(); then assignment operators and the first setters.
// NOTE(review): chunk flattened by extraction — directives/comments mid-line, '<...>'
// template-argument lists stripped. Bytes preserved; restore from upstream — TODO confirm.
copyCount{ copyCount_ } , copyAddressRange{ copyAddressRange_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , pImageSubresources{ pImageSubresources_ } { } VULKAN_HPP_CONSTEXPR CopyMemoryToImageIndirectInfoKHR( CopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToImageIndirectInfoKHR( VkCopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryToImageIndirectInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyMemoryToImageIndirectInfoKHR( AddressCopyFlagsKHR srcCopyFlags_, StridedDeviceAddressRangeKHR copyAddressRange_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & imageSubresources_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , srcCopyFlags( srcCopyFlags_ ) , copyCount( static_cast( imageSubresources_.size() ) ) , copyAddressRange( copyAddressRange_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , pImageSubresources( imageSubresources_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyMemoryToImageIndirectInfoKHR & operator=( CopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryToImageIndirectInfoKHR & operator=( VkCopyMemoryToImageIndirectInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) & VULKAN_HPP_NOEXCEPT { srcCopyFlags = srcCopyFlags_; return *this; } 
// Remaining setters (srcCopyFlags rvalue, copyCount, copyAddressRange, dstImage,
// dstImageLayout, pImageSubresources).
VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setSrcCopyFlags( AddressCopyFlagsKHR srcCopyFlags_ ) && VULKAN_HPP_NOEXCEPT { srcCopyFlags = srcCopyFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setCopyCount( uint32_t copyCount_ ) & VULKAN_HPP_NOEXCEPT { copyCount = copyCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setCopyCount( uint32_t copyCount_ ) && VULKAN_HPP_NOEXCEPT { copyCount = copyCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) & VULKAN_HPP_NOEXCEPT { copyAddressRange = copyAddressRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setCopyAddressRange( StridedDeviceAddressRangeKHR const & copyAddressRange_ ) && VULKAN_HPP_NOEXCEPT { copyAddressRange = copyAddressRange_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR & setPImageSubresources( const ImageSubresourceLayers * pImageSubresources_ ) & VULKAN_HPP_NOEXCEPT { pImageSubresources = pImageSubresources_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageIndirectInfoKHR && setPImageSubresources( const 
// Enhanced-mode setImageSubresources() keeping copyCount / pImageSubresources in sync,
// Vk conversion operators, reflect(), comparison operators, and the start of the member list.
ImageSubresourceLayers * pImageSubresources_ ) && VULKAN_HPP_NOEXCEPT { pImageSubresources = pImageSubresources_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyMemoryToImageIndirectInfoKHR & setImageSubresources( ArrayProxyNoTemporaries const & imageSubresources_ ) VULKAN_HPP_NOEXCEPT { copyCount = static_cast( imageSubresources_.size() ); pImageSubresources = imageSubresources_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryToImageIndirectInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryToImageIndirectInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcCopyFlags, copyCount, copyAddressRange, dstImage, dstImageLayout, pImageSubresources ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyMemoryToImageIndirectInfoKHR const & ) const = default; #else bool operator==( CopyMemoryToImageIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcCopyFlags == rhs.srcCopyFlags ) && ( copyCount == rhs.copyCount ) && ( copyAddressRange == rhs.copyAddressRange ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( pImageSubresources == rhs.pImageSubresources ); # endif } bool operator!=( CopyMemoryToImageIndirectInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
// Tail of CopyMemoryToImageIndirectInfoKHR: data members and CppType specializations.
// The wrapper for VkMemoryToImageCopy (host image-copy, upward: memory -> image) then
// begins; its definition continues past the end of this chunk.
StructureType::eCopyMemoryToImageIndirectInfoKHR; const void * pNext = {}; AddressCopyFlagsKHR srcCopyFlags = {}; uint32_t copyCount = {}; StridedDeviceAddressRangeKHR copyAddressRange = {}; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; const ImageSubresourceLayers * pImageSubresources = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryToImageIndirectInfoKHR; }; #endif template <> struct CppType { using Type = CopyMemoryToImageIndirectInfoKHR; }; // wrapper struct for struct VkMemoryToImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryToImageCopy.html struct MemoryToImageCopy { using NativeType = VkMemoryToImageCopy; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopy; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryToImageCopy( const void * pHostPointer_ = {}, uint32_t memoryRowLength_ = {}, uint32_t memoryImageHeight_ = {}, ImageSubresourceLayers imageSubresource_ = {}, Offset3D imageOffset_ = {}, Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pHostPointer{ pHostPointer_ } , memoryRowLength{ memoryRowLength_ } , memoryImageHeight{ memoryImageHeight_ } , imageSubresource{ imageSubresource_ } , imageOffset{ imageOffset_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryToImageCopy( *reinterpret_cast( &rhs ) ) {} MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) & VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setPHostPointer( const void * pHostPointer_ ) && VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) & VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setMemoryRowLength( uint32_t memoryRowLength_ ) && VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) & VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setMemoryImageHeight( uint32_t memoryImageHeight_ ) && VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) & VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setImageSubresource( ImageSubresourceLayers const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( Offset3D const & imageOffset_ ) & VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 
MemoryToImageCopy && setImageOffset( Offset3D const & imageOffset_ ) && VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( Extent3D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy && setImageExtent( Extent3D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryToImageCopy const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryToImageCopy *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pHostPointer, memoryRowLength, memoryImageHeight, imageSubresource, imageOffset, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryToImageCopy const & ) const = default; #else bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pHostPointer == rhs.pHostPointer ) && ( memoryRowLength == rhs.memoryRowLength ) && ( memoryImageHeight == rhs.memoryImageHeight ) && ( imageSubresource == rhs.imageSubresource ) && ( imageOffset == rhs.imageOffset ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryToImageCopy; const void * pNext = {}; const void * pHostPointer = {}; uint32_t memoryRowLength = 
{}; uint32_t memoryImageHeight = {}; ImageSubresourceLayers imageSubresource = {}; Offset3D imageOffset = {}; Extent3D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryToImageCopy; }; #endif template <> struct CppType { using Type = MemoryToImageCopy; }; using MemoryToImageCopyEXT = MemoryToImageCopy; // wrapper struct for struct VkCopyMemoryToImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToImageInfo.html struct CopyMemoryToImageInfo { using NativeType = VkCopyMemoryToImageInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( HostImageCopyFlags flags_ = {}, Image dstImage_ = {}, ImageLayout dstImageLayout_ = ImageLayout::eUndefined, uint32_t regionCount_ = {}, const MemoryToImageCopy * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , dstImage{ dstImage_ } , dstImageLayout{ dstImageLayout_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryToImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyMemoryToImageInfo( HostImageCopyFlags flags_, Image dstImage_, ImageLayout dstImageLayout_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , dstImage( dstImage_ ) , dstImageLayout( dstImageLayout_ ) , regionCount( static_cast( regions_.size() ) ) , pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const 
// NOTE(review): same extraction damage as the surrounding chunk -- all <...>
// template-argument lists are stripped (reinterpret_cast / static_cast /
// std::tuple / ArrayProxyNoTemporaries / CppType) and "#if" directives and
// "//" comments are split across physical lines.  This is registry-generated
// code: regenerate it rather than hand-patching.
// This span contains:
//   * the remainder of struct CopyMemoryToImageInfo (wrapper for
//     VkCopyMemoryToImageInfo): setters for pNext / flags / dstImage /
//     dstImageLayout / regionCount / pRegions, the enhanced-mode setRegions()
//     helper (sets regionCount and pRegions together from one array proxy),
//     native-handle conversions, reflect() / comparison helpers, member
//     definitions and CppType mappings, plus alias CopyMemoryToImageInfoEXT;
//   * struct CopyMemoryToMicromapInfoEXT (wrapper for
//     VkCopyMemoryToMicromapInfoEXT: src device/host address, dst micromap,
//     copy mode -- no operator== is emitted here, presumably because the
//     DeviceOrHostAddressConstKHR union member is not comparable);
//   * struct CopyMicromapInfoEXT (wrapper for VkCopyMicromapInfoEXT:
//     src/dst micromaps and a copy mode, with full comparison support);
//   * struct CopyMicromapToMemoryInfoEXT (wrapper for
//     VkCopyMicromapToMemoryInfoEXT);
//   * the start of struct TensorCopyARM (wrapper for VkTensorCopyARM), whose
//     enhanced-mode constructor checks that the srcOffset / dstOffset /
//     extent array proxies, where non-empty, all have the same number of
//     elements (VULKAN_HPP_ASSERT under VULKAN_HPP_NO_EXCEPTIONS, otherwise
//     throwing LogicError -- the throw statements continue past this span).
& rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( HostImageCopyFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setFlags( HostImageCopyFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( Image dstImage_ ) & VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setDstImage( Image dstImage_ ) && VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( ImageLayout dstImageLayout_ ) & VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setDstImageLayout( ImageLayout dstImageLayout_ ) && VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const MemoryToImageCopy * pRegions_ ) & VULKAN_HPP_NOEXCEPT { 
pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo && setPRegions( const MemoryToImageCopy * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyMemoryToImageInfo & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToImageInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryToImageInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, dstImage, dstImageLayout, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyMemoryToImageInfo const & ) const = default; #else bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dstImage == rhs.dstImage ) && ( dstImageLayout == rhs.dstImageLayout ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyMemoryToImageInfo; const void * pNext = {}; HostImageCopyFlags flags = {}; Image dstImage = {}; ImageLayout dstImageLayout = ImageLayout::eUndefined; uint32_t regionCount = {}; const 
MemoryToImageCopy * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryToImageInfo; }; #endif template <> struct CppType { using Type = CopyMemoryToImageInfo; }; using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; // wrapper struct for struct VkCopyMemoryToMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMemoryToMicromapInfoEXT.html struct CopyMemoryToMicromapInfoEXT { using NativeType = VkCopyMemoryToMicromapInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToMicromapInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( DeviceOrHostAddressConstKHR src_ = {}, MicromapEXT dst_ = {}, CopyMicromapModeEXT mode_ = CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMemoryToMicromapInfoEXT( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMemoryToMicromapInfoEXT( *reinterpret_cast( &rhs ) ) { } CopyMemoryToMicromapInfoEXT & operator=( CopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMemoryToMicromapInfoEXT & operator=( VkCopyMemoryToMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setSrc( DeviceOrHostAddressConstKHR const & src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT && setSrc( DeviceOrHostAddressConstKHR const & src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setDst( MicromapEXT dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT && setDst( MicromapEXT dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT & setMode( CopyMicromapModeEXT mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMemoryToMicromapInfoEXT && setMode( CopyMicromapModeEXT mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMemoryToMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMemoryToMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMemoryToMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif public: StructureType sType = StructureType::eCopyMemoryToMicromapInfoEXT; const void * pNext = {}; DeviceOrHostAddressConstKHR src = {}; MicromapEXT dst = {}; CopyMicromapModeEXT mode = CopyMicromapModeEXT::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMemoryToMicromapInfoEXT; }; #endif template <> struct CppType { using Type = CopyMemoryToMicromapInfoEXT; }; // wrapper struct for struct 
VkCopyMicromapInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapInfoEXT.html struct CopyMicromapInfoEXT { using NativeType = VkCopyMicromapInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( MicromapEXT src_ = {}, MicromapEXT dst_ = {}, CopyMicromapModeEXT mode_ = CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR CopyMicromapInfoEXT( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMicromapInfoEXT( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapInfoEXT( *reinterpret_cast( &rhs ) ) { } CopyMicromapInfoEXT & operator=( CopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMicromapInfoEXT & operator=( VkCopyMicromapInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setSrc( MicromapEXT src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT && setSrc( MicromapEXT src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setDst( MicromapEXT dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT 
&& setDst( MicromapEXT dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT & setMode( CopyMicromapModeEXT mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapInfoEXT && setMode( CopyMicromapModeEXT mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMicromapInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMicromapInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMicromapInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMicromapInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyMicromapInfoEXT const & ) const = default; #else bool operator==( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( src == rhs.src ) && ( dst == rhs.dst ) && ( mode == rhs.mode ); # endif } bool operator!=( CopyMicromapInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyMicromapInfoEXT; const void * pNext = {}; MicromapEXT src = {}; MicromapEXT dst = {}; CopyMicromapModeEXT mode = CopyMicromapModeEXT::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMicromapInfoEXT; }; #endif template <> struct CppType { using Type = CopyMicromapInfoEXT; }; // wrapper struct for struct VkCopyMicromapToMemoryInfoEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyMicromapToMemoryInfoEXT.html struct CopyMicromapToMemoryInfoEXT { using NativeType = VkCopyMicromapToMemoryInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMicromapToMemoryInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( MicromapEXT src_ = {}, DeviceOrHostAddressKHR dst_ = {}, CopyMicromapModeEXT mode_ = CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , src{ src_ } , dst{ dst_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyMicromapToMemoryInfoEXT( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CopyMicromapToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) { } CopyMicromapToMemoryInfoEXT & operator=( CopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyMicromapToMemoryInfoEXT & operator=( VkCopyMicromapToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setSrc( MicromapEXT src_ ) & VULKAN_HPP_NOEXCEPT { src = src_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT && setSrc( MicromapEXT src_ ) && VULKAN_HPP_NOEXCEPT { src = src_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
CopyMicromapToMemoryInfoEXT & setDst( DeviceOrHostAddressKHR const & dst_ ) & VULKAN_HPP_NOEXCEPT { dst = dst_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT && setDst( DeviceOrHostAddressKHR const & dst_ ) && VULKAN_HPP_NOEXCEPT { dst = dst_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT & setMode( CopyMicromapModeEXT mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyMicromapToMemoryInfoEXT && setMode( CopyMicromapModeEXT mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyMicromapToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMicromapToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyMicromapToMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyMicromapToMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, src, dst, mode ); } #endif public: StructureType sType = StructureType::eCopyMicromapToMemoryInfoEXT; const void * pNext = {}; MicromapEXT src = {}; DeviceOrHostAddressKHR dst = {}; CopyMicromapModeEXT mode = CopyMicromapModeEXT::eClone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyMicromapToMemoryInfoEXT; }; #endif template <> struct CppType { using Type = CopyMicromapToMemoryInfoEXT; }; // wrapper struct for struct VkTensorCopyARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCopyARM.html struct TensorCopyARM { using NativeType = VkTensorCopyARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorCopyARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR TensorCopyARM( uint32_t dimensionCount_ = {}, const uint64_t * pSrcOffset_ = {}, const uint64_t * pDstOffset_ = {}, const uint64_t * pExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dimensionCount{ dimensionCount_ } , pSrcOffset{ pSrcOffset_ } , pDstOffset{ pDstOffset_ } , pExtent{ pExtent_ } { } VULKAN_HPP_CONSTEXPR TensorCopyARM( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; TensorCopyARM( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT : TensorCopyARM( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCopyARM( ArrayProxyNoTemporaries const & srcOffset_, ArrayProxyNoTemporaries const & dstOffset_ = {}, ArrayProxyNoTemporaries const & extent_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , dimensionCount( static_cast( srcOffset_.size() ) ) , pSrcOffset( srcOffset_.data() ) , pDstOffset( dstOffset_.data() ) , pExtent( extent_.data() ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( srcOffset_.empty() || dstOffset_.empty() || ( srcOffset_.size() == dstOffset_.size() ) ); VULKAN_HPP_ASSERT( srcOffset_.empty() || extent_.empty() || ( srcOffset_.size() == extent_.size() ) ); VULKAN_HPP_ASSERT( dstOffset_.empty() || extent_.empty() || ( dstOffset_.size() == extent_.size() ) ); # else if ( !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !dstOffset_.empty() && ( srcOffset_.size() != dstOffset_.size() )" ); } if ( !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !srcOffset_.empty() && !extent_.empty() && ( srcOffset_.size() != extent_.size() )" ); } if ( !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() ) ) { throw 
// NOTE(review): same extraction damage as the surrounding chunk (stripped
// <...> template arguments throughout); regenerate this header from the
// Vulkan XML registry rather than hand-patching.
// This span contains:
//   * the remainder of struct TensorCopyARM: the final LogicError branch of
//     the enhanced-mode constructor's size validation, assignment operators,
//     setters -- note that each of the enhanced-mode setSrcOffset() /
//     setDstOffset() / setExtent() helpers overwrites the shared
//     dimensionCount field from its own proxy's size -- native-handle
//     conversions, reflect() / operator== / operator!= helpers, member
//     definitions and CppType mappings;
//   * struct CopyTensorInfoARM (wrapper for VkCopyTensorInfoARM: srcTensor /
//     dstTensor plus a regionCount/pRegions array of TensorCopyARM),
//     including an enhanced-mode constructor and setRegions() that populate
//     regionCount and pRegions together from one array proxy;
//   * the beginning of struct CuFunctionCreateInfoNVX (definition continues
//     past this chunk).
LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorCopyARM::TensorCopyARM: !dstOffset_.empty() && !extent_.empty() && ( dstOffset_.size() != extent_.size() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ TensorCopyARM & operator=( TensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ TensorCopyARM & operator=( VkTensorCopyARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setDimensionCount( uint32_t dimensionCount_ ) & VULKAN_HPP_NOEXCEPT { dimensionCount = dimensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM && setDimensionCount( uint32_t dimensionCount_ ) && VULKAN_HPP_NOEXCEPT { dimensionCount = dimensionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPSrcOffset( const uint64_t * pSrcOffset_ ) & VULKAN_HPP_NOEXCEPT { pSrcOffset = pSrcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM && setPSrcOffset( const uint64_t * pSrcOffset_ ) && VULKAN_HPP_NOEXCEPT { pSrcOffset = pSrcOffset_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCopyARM & setSrcOffset( ArrayProxyNoTemporaries const & srcOffset_ ) VULKAN_HPP_NOEXCEPT { dimensionCount = static_cast( srcOffset_.size() ); pSrcOffset = srcOffset_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPDstOffset( const uint64_t * pDstOffset_ ) & VULKAN_HPP_NOEXCEPT { pDstOffset = pDstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM && 
setPDstOffset( const uint64_t * pDstOffset_ ) && VULKAN_HPP_NOEXCEPT { pDstOffset = pDstOffset_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCopyARM & setDstOffset( ArrayProxyNoTemporaries const & dstOffset_ ) VULKAN_HPP_NOEXCEPT { dimensionCount = static_cast( dstOffset_.size() ); pDstOffset = dstOffset_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 TensorCopyARM & setPExtent( const uint64_t * pExtent_ ) & VULKAN_HPP_NOEXCEPT { pExtent = pExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCopyARM && setPExtent( const uint64_t * pExtent_ ) && VULKAN_HPP_NOEXCEPT { pExtent = pExtent_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCopyARM & setExtent( ArrayProxyNoTemporaries const & extent_ ) VULKAN_HPP_NOEXCEPT { dimensionCount = static_cast( extent_.size() ); pExtent = extent_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkTensorCopyARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTensorCopyARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTensorCopyARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkTensorCopyARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dimensionCount, pSrcOffset, pDstOffset, pExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( TensorCopyARM const & ) const = default; #else bool operator==( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dimensionCount == rhs.dimensionCount ) && ( pSrcOffset == rhs.pSrcOffset ) && ( pDstOffset == 
rhs.pDstOffset ) && ( pExtent == rhs.pExtent ); # endif } bool operator!=( TensorCopyARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eTensorCopyARM; const void * pNext = {}; uint32_t dimensionCount = {}; const uint64_t * pSrcOffset = {}; const uint64_t * pDstOffset = {}; const uint64_t * pExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = TensorCopyARM; }; #endif template <> struct CppType { using Type = TensorCopyARM; }; // wrapper struct for struct VkCopyTensorInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCopyTensorInfoARM.html struct CopyTensorInfoARM { using NativeType = VkCopyTensorInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyTensorInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CopyTensorInfoARM( TensorARM srcTensor_ = {}, TensorARM dstTensor_ = {}, uint32_t regionCount_ = {}, const TensorCopyARM * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcTensor{ srcTensor_ } , dstTensor{ dstTensor_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR CopyTensorInfoARM( CopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; CopyTensorInfoARM( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : CopyTensorInfoARM( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyTensorInfoARM( TensorARM srcTensor_, TensorARM dstTensor_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ), srcTensor( srcTensor_ ), dstTensor( dstTensor_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CopyTensorInfoARM & operator=( CopyTensorInfoARM const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CopyTensorInfoARM & operator=( VkCopyTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setSrcTensor( TensorARM srcTensor_ ) & VULKAN_HPP_NOEXCEPT { srcTensor = srcTensor_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM && setSrcTensor( TensorARM srcTensor_ ) && VULKAN_HPP_NOEXCEPT { srcTensor = srcTensor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setDstTensor( TensorARM dstTensor_ ) & VULKAN_HPP_NOEXCEPT { dstTensor = dstTensor_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM && setDstTensor( TensorARM dstTensor_ ) && VULKAN_HPP_NOEXCEPT { dstTensor = dstTensor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM & setPRegions( const TensorCopyARM * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 CopyTensorInfoARM && setPRegions( const TensorCopyARM * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CopyTensorInfoARM & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( 
regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCopyTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyTensorInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCopyTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCopyTensorInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcTensor, dstTensor, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CopyTensorInfoARM const & ) const = default; #else bool operator==( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcTensor == rhs.srcTensor ) && ( dstTensor == rhs.dstTensor ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( CopyTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCopyTensorInfoARM; const void * pNext = {}; TensorARM srcTensor = {}; TensorARM dstTensor = {}; uint32_t regionCount = {}; const TensorCopyARM * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CopyTensorInfoARM; }; #endif template <> struct CppType { using Type = CopyTensorInfoARM; }; // wrapper struct for struct VkCuFunctionCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuFunctionCreateInfoNVX.html struct CuFunctionCreateInfoNVX { using NativeType = VkCuFunctionCreateInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eCuFunctionCreateInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , module{ module_ } , pName{ pName_ } { } VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; CuFunctionCreateInfoNVX( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuFunctionCreateInfoNVX( *reinterpret_cast( &rhs ) ) { } CuFunctionCreateInfoNVX & operator=( CuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CuFunctionCreateInfoNVX & operator=( VkCuFunctionCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setModule( CuModuleNVX module_ ) & VULKAN_HPP_NOEXCEPT { module = module_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX && setModule( CuModuleNVX module_ ) && VULKAN_HPP_NOEXCEPT { module = module_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX & setPName( const char * pName_ ) & VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuFunctionCreateInfoNVX && setPName( const char * pName_ ) && VULKAN_HPP_NOEXCEPT { pName = pName_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCuFunctionCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkCuFunctionCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuFunctionCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCuFunctionCreateInfoNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, module, pName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp; if ( pName != rhs.pName ) if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); } bool operator!=( CuFunctionCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eCuFunctionCreateInfoNVX; const void * pNext = {}; CuModuleNVX module = {}; const char * pName = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CuFunctionCreateInfoNVX; }; #endif template <> struct CppType { using Type = CuFunctionCreateInfoNVX; }; // wrapper struct for struct VkCuLaunchInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuLaunchInfoNVX.html struct CuLaunchInfoNVX { using NativeType = VkCuLaunchInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType 
= StructureType::eCuLaunchInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuFunctionNVX function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , function{ function_ } , gridDimX{ gridDimX_ } , gridDimY{ gridDimY_ } , gridDimZ{ gridDimZ_ } , blockDimX{ blockDimX_ } , blockDimY{ blockDimY_ } , blockDimZ{ blockDimZ_ } , sharedMemBytes{ sharedMemBytes_ } , paramCount{ paramCount_ } , pParams{ pParams_ } , extraCount{ extraCount_ } , pExtras{ pExtras_ } { } VULKAN_HPP_CONSTEXPR CuLaunchInfoNVX( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; CuLaunchInfoNVX( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuLaunchInfoNVX( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CuLaunchInfoNVX( CuFunctionNVX function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, ArrayProxyNoTemporaries const & params_, ArrayProxyNoTemporaries const & extras_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , function( function_ ) , gridDimX( gridDimX_ ) , gridDimY( gridDimY_ ) , gridDimZ( gridDimZ_ ) , blockDimX( blockDimX_ ) , blockDimY( blockDimY_ ) , blockDimZ( blockDimZ_ ) , sharedMemBytes( sharedMemBytes_ ) , paramCount( params_.size() ) , pParams( params_.data() ) , extraCount( extras_.size() ) , pExtras( extras_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CuLaunchInfoNVX & operator=( CuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
CuLaunchInfoNVX & operator=( VkCuLaunchInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setFunction( CuFunctionNVX function_ ) & VULKAN_HPP_NOEXCEPT { function = function_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setFunction( CuFunctionNVX function_ ) && VULKAN_HPP_NOEXCEPT { function = function_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimX( uint32_t gridDimX_ ) & VULKAN_HPP_NOEXCEPT { gridDimX = gridDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setGridDimX( uint32_t gridDimX_ ) && VULKAN_HPP_NOEXCEPT { gridDimX = gridDimX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimY( uint32_t gridDimY_ ) & VULKAN_HPP_NOEXCEPT { gridDimY = gridDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setGridDimY( uint32_t gridDimY_ ) && VULKAN_HPP_NOEXCEPT { gridDimY = gridDimY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setGridDimZ( uint32_t gridDimZ_ ) & VULKAN_HPP_NOEXCEPT { gridDimZ = gridDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setGridDimZ( uint32_t gridDimZ_ ) && VULKAN_HPP_NOEXCEPT { gridDimZ = gridDimZ_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimX( uint32_t blockDimX_ ) & VULKAN_HPP_NOEXCEPT { blockDimX = blockDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setBlockDimX( uint32_t blockDimX_ ) && VULKAN_HPP_NOEXCEPT { blockDimX = blockDimX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
CuLaunchInfoNVX & setBlockDimY( uint32_t blockDimY_ ) & VULKAN_HPP_NOEXCEPT { blockDimY = blockDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setBlockDimY( uint32_t blockDimY_ ) && VULKAN_HPP_NOEXCEPT { blockDimY = blockDimY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setBlockDimZ( uint32_t blockDimZ_ ) & VULKAN_HPP_NOEXCEPT { blockDimZ = blockDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setBlockDimZ( uint32_t blockDimZ_ ) && VULKAN_HPP_NOEXCEPT { blockDimZ = blockDimZ_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setSharedMemBytes( uint32_t sharedMemBytes_ ) & VULKAN_HPP_NOEXCEPT { sharedMemBytes = sharedMemBytes_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setSharedMemBytes( uint32_t sharedMemBytes_ ) && VULKAN_HPP_NOEXCEPT { sharedMemBytes = sharedMemBytes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setParamCount( size_t paramCount_ ) & VULKAN_HPP_NOEXCEPT { paramCount = paramCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setParamCount( size_t paramCount_ ) && VULKAN_HPP_NOEXCEPT { paramCount = paramCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPParams( const void * const * pParams_ ) & VULKAN_HPP_NOEXCEPT { pParams = pParams_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setPParams( const void * const * pParams_ ) && VULKAN_HPP_NOEXCEPT { pParams = pParams_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CuLaunchInfoNVX & setParams( ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT { paramCount = params_.size(); pParams = params_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setExtraCount( size_t extraCount_ ) & VULKAN_HPP_NOEXCEPT { extraCount = extraCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setExtraCount( size_t 
extraCount_ ) && VULKAN_HPP_NOEXCEPT { extraCount = extraCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX & setPExtras( const void * const * pExtras_ ) & VULKAN_HPP_NOEXCEPT { pExtras = pExtras_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuLaunchInfoNVX && setPExtras( const void * const * pExtras_ ) && VULKAN_HPP_NOEXCEPT { pExtras = pExtras_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CuLaunchInfoNVX & setExtras( ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT { extraCount = extras_.size(); pExtras = extras_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCuLaunchInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuLaunchInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuLaunchInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCuLaunchInfoNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CuLaunchInfoNVX const & ) const = default; #else bool operator==( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == 
rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras ); # endif } bool operator!=( CuLaunchInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCuLaunchInfoNVX; const void * pNext = {}; CuFunctionNVX function = {}; uint32_t gridDimX = {}; uint32_t gridDimY = {}; uint32_t gridDimZ = {}; uint32_t blockDimX = {}; uint32_t blockDimY = {}; uint32_t blockDimZ = {}; uint32_t sharedMemBytes = {}; size_t paramCount = {}; const void * const * pParams = {}; size_t extraCount = {}; const void * const * pExtras = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CuLaunchInfoNVX; }; #endif template <> struct CppType { using Type = CuLaunchInfoNVX; }; // wrapper struct for struct VkCuModuleCreateInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleCreateInfoNVX.html struct CuModuleCreateInfoNVX { using NativeType = VkCuModuleCreateInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleCreateInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dataSize{ dataSize_ } , pData{ pData_ } { } VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; CuModuleCreateInfoNVX( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuModuleCreateInfoNVX( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template CuModuleCreateInfoNVX( ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
CuModuleCreateInfoNVX & operator=( CuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CuModuleCreateInfoNVX & operator=( VkCuModuleCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setDataSize( size_t dataSize_ ) & VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX && setDataSize( size_t dataSize_ ) && VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX & setPData( const void * pData_ ) & VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleCreateInfoNVX && setPData( const void * pData_ ) && VULKAN_HPP_NOEXCEPT { pData = pData_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template CuModuleCreateInfoNVX & setData( ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof( T ); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCuModuleCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuModuleCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuModuleCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCuModuleCreateInfoNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) 
std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CuModuleCreateInfoNVX const & ) const = default; #else bool operator==( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } bool operator!=( CuModuleCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCuModuleCreateInfoNVX; const void * pNext = {}; size_t dataSize = {}; const void * pData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CuModuleCreateInfoNVX; }; #endif template <> struct CppType { using Type = CuModuleCreateInfoNVX; }; // wrapper struct for struct VkCuModuleTexturingModeCreateInfoNVX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkCuModuleTexturingModeCreateInfoNVX.html struct CuModuleTexturingModeCreateInfoNVX { using NativeType = VkCuModuleTexturingModeCreateInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleTexturingModeCreateInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( Bool32 use64bitTexturing_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , use64bitTexturing{ use64bitTexturing_ } { } VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast( &rhs 
) ) { } CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( Bool32 use64bitTexturing_ ) & VULKAN_HPP_NOEXCEPT { use64bitTexturing = use64bitTexturing_; return *this; } VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX && setUse64bitTexturing( Bool32 use64bitTexturing_ ) && VULKAN_HPP_NOEXCEPT { use64bitTexturing = use64bitTexturing_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCuModuleTexturingModeCreateInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCuModuleTexturingModeCreateInfoNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, use64bitTexturing ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default; #else bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( use64bitTexturing == rhs.use64bitTexturing ); # endif } bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCuModuleTexturingModeCreateInfoNVX; const void * pNext = {}; Bool32 use64bitTexturing = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CuModuleTexturingModeCreateInfoNVX; }; #endif template <> struct CppType { using Type = CuModuleTexturingModeCreateInfoNVX; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkCudaFunctionCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaFunctionCreateInfoNV.html struct CudaFunctionCreateInfoNV { using NativeType = VkCudaFunctionCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaFunctionCreateInfoNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , module{ module_ } , pName{ pName_ } { } VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CudaFunctionCreateInfoNV( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaFunctionCreateInfoNV( *reinterpret_cast( &rhs ) ) { } CudaFunctionCreateInfoNV & operator=( CudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CudaFunctionCreateInfoNV & operator=( VkCudaFunctionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setModule( CudaModuleNV module_ ) & VULKAN_HPP_NOEXCEPT { module = module_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV && setModule( CudaModuleNV module_ ) && VULKAN_HPP_NOEXCEPT { module = module_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV & setPName( const char * pName_ ) & VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaFunctionCreateInfoNV && setPName( const char * pName_ ) && VULKAN_HPP_NOEXCEPT { pName = pName_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCudaFunctionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCudaFunctionCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCudaFunctionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCudaFunctionCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, module, pName ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp; if ( pName != rhs.pName ) if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ); } bool operator!=( CudaFunctionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eCudaFunctionCreateInfoNV; const void * pNext = {}; CudaModuleNV module = {}; const char * pName = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CudaFunctionCreateInfoNV; }; # endif template <> struct CppType { using Type = CudaFunctionCreateInfoNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkCudaLaunchInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaLaunchInfoNV.html struct CudaLaunchInfoNV { using NativeType = VkCudaLaunchInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaLaunchInfoNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaFunctionNV function_ = {}, uint32_t gridDimX_ = {}, uint32_t gridDimY_ = {}, uint32_t gridDimZ_ = {}, uint32_t blockDimX_ = {}, uint32_t blockDimY_ = {}, uint32_t blockDimZ_ = {}, uint32_t sharedMemBytes_ = {}, size_t paramCount_ = {}, const void * const * pParams_ = {}, size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , function{ function_ } , gridDimX{ gridDimX_ } , gridDimY{ gridDimY_ } , gridDimZ{ gridDimZ_ } , blockDimX{ blockDimX_ } , blockDimY{ blockDimY_ } , blockDimZ{ blockDimZ_ } , sharedMemBytes{ sharedMemBytes_ } , paramCount{ paramCount_ } , 
pParams{ pParams_ } , extraCount{ extraCount_ } , pExtras{ pExtras_ } { } VULKAN_HPP_CONSTEXPR CudaLaunchInfoNV( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CudaLaunchInfoNV( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaLaunchInfoNV( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CudaLaunchInfoNV( CudaFunctionNV function_, uint32_t gridDimX_, uint32_t gridDimY_, uint32_t gridDimZ_, uint32_t blockDimX_, uint32_t blockDimY_, uint32_t blockDimZ_, uint32_t sharedMemBytes_, ArrayProxyNoTemporaries const & params_, ArrayProxyNoTemporaries const & extras_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , function( function_ ) , gridDimX( gridDimX_ ) , gridDimY( gridDimY_ ) , gridDimZ( gridDimZ_ ) , blockDimX( blockDimX_ ) , blockDimY( blockDimY_ ) , blockDimZ( blockDimZ_ ) , sharedMemBytes( sharedMemBytes_ ) , paramCount( params_.size() ) , pParams( params_.data() ) , extraCount( extras_.size() ) , pExtras( extras_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CudaLaunchInfoNV & operator=( CudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CudaLaunchInfoNV & operator=( VkCudaLaunchInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setFunction( CudaFunctionNV function_ ) & VULKAN_HPP_NOEXCEPT { function = function_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setFunction( CudaFunctionNV function_ ) && VULKAN_HPP_NOEXCEPT { function = function_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimX( uint32_t gridDimX_ ) & VULKAN_HPP_NOEXCEPT { gridDimX = gridDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setGridDimX( uint32_t gridDimX_ ) && VULKAN_HPP_NOEXCEPT { gridDimX = gridDimX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimY( uint32_t gridDimY_ ) & VULKAN_HPP_NOEXCEPT { gridDimY = gridDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setGridDimY( uint32_t gridDimY_ ) && VULKAN_HPP_NOEXCEPT { gridDimY = gridDimY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setGridDimZ( uint32_t gridDimZ_ ) & VULKAN_HPP_NOEXCEPT { gridDimZ = gridDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setGridDimZ( uint32_t gridDimZ_ ) && VULKAN_HPP_NOEXCEPT { gridDimZ = gridDimZ_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimX( uint32_t blockDimX_ ) & VULKAN_HPP_NOEXCEPT { blockDimX = blockDimX_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setBlockDimX( uint32_t blockDimX_ ) && VULKAN_HPP_NOEXCEPT { blockDimX = blockDimX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimY( uint32_t blockDimY_ ) & VULKAN_HPP_NOEXCEPT { blockDimY = blockDimY_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setBlockDimY( uint32_t blockDimY_ ) && VULKAN_HPP_NOEXCEPT { blockDimY = blockDimY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setBlockDimZ( uint32_t blockDimZ_ ) & VULKAN_HPP_NOEXCEPT { blockDimZ = blockDimZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setBlockDimZ( uint32_t blockDimZ_ ) && VULKAN_HPP_NOEXCEPT { blockDimZ = blockDimZ_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setSharedMemBytes( uint32_t sharedMemBytes_ ) & VULKAN_HPP_NOEXCEPT { sharedMemBytes = sharedMemBytes_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && 
setSharedMemBytes( uint32_t sharedMemBytes_ ) && VULKAN_HPP_NOEXCEPT { sharedMemBytes = sharedMemBytes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setParamCount( size_t paramCount_ ) & VULKAN_HPP_NOEXCEPT { paramCount = paramCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setParamCount( size_t paramCount_ ) && VULKAN_HPP_NOEXCEPT { paramCount = paramCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPParams( const void * const * pParams_ ) & VULKAN_HPP_NOEXCEPT { pParams = pParams_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setPParams( const void * const * pParams_ ) && VULKAN_HPP_NOEXCEPT { pParams = pParams_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CudaLaunchInfoNV & setParams( ArrayProxyNoTemporaries const & params_ ) VULKAN_HPP_NOEXCEPT { paramCount = params_.size(); pParams = params_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setExtraCount( size_t extraCount_ ) & VULKAN_HPP_NOEXCEPT { extraCount = extraCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setExtraCount( size_t extraCount_ ) && VULKAN_HPP_NOEXCEPT { extraCount = extraCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV & setPExtras( const void * const * pExtras_ ) & VULKAN_HPP_NOEXCEPT { pExtras = pExtras_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaLaunchInfoNV && setPExtras( const void * const * pExtras_ ) && VULKAN_HPP_NOEXCEPT { pExtras = pExtras_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CudaLaunchInfoNV & setExtras( ArrayProxyNoTemporaries const & extras_ ) VULKAN_HPP_NOEXCEPT { extraCount = extras_.size(); pExtras = extras_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCudaLaunchInfoNV const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkCudaLaunchInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCudaLaunchInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCudaLaunchInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, function, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, paramCount, pParams, extraCount, pExtras ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CudaLaunchInfoNV const & ) const = default; # else bool operator==( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( function == rhs.function ) && ( gridDimX == rhs.gridDimX ) && ( gridDimY == rhs.gridDimY ) && ( gridDimZ == rhs.gridDimZ ) && ( blockDimX == rhs.blockDimX ) && ( blockDimY == rhs.blockDimY ) && ( blockDimZ == rhs.blockDimZ ) && ( sharedMemBytes == rhs.sharedMemBytes ) && ( paramCount == rhs.paramCount ) && ( pParams == rhs.pParams ) && ( extraCount == rhs.extraCount ) && ( pExtras == rhs.pExtras ); # endif } bool operator!=( CudaLaunchInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eCudaLaunchInfoNV; const void * pNext = {}; CudaFunctionNV function = {}; uint32_t gridDimX = {}; uint32_t gridDimY = {}; uint32_t gridDimZ = {}; uint32_t blockDimX = {}; uint32_t blockDimY = {}; uint32_t blockDimZ = {}; uint32_t sharedMemBytes = {}; size_t paramCount = {}; const void * const * pParams = {}; size_t extraCount = {}; const void * const * pExtras = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CudaLaunchInfoNV; }; # endif template <> struct CppType { using 
Type = CudaLaunchInfoNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkCudaModuleCreateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCudaModuleCreateInfoNV.html struct CudaModuleCreateInfoNV { using NativeType = VkCudaModuleCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCudaModuleCreateInfoNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dataSize{ dataSize_ } , pData{ pData_ } { } VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; CudaModuleCreateInfoNV( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : CudaModuleCreateInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template CudaModuleCreateInfoNV( ArrayProxyNoTemporaries const & data_, const void * pNext_ = nullptr ) : pNext( pNext_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CudaModuleCreateInfoNV & operator=( CudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CudaModuleCreateInfoNV & operator=( VkCudaModuleCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setDataSize( size_t dataSize_ ) & VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV && setDataSize( size_t dataSize_ ) && VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV & setPData( const void * pData_ ) & VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } VULKAN_HPP_CONSTEXPR_14 CudaModuleCreateInfoNV && setPData( const void * pData_ ) && VULKAN_HPP_NOEXCEPT { pData = pData_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template CudaModuleCreateInfoNV & setData( ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof( T ); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCudaModuleCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCudaModuleCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCudaModuleCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCudaModuleCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dataSize, pData ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CudaModuleCreateInfoNV const & ) const = default; # else bool operator==( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } bool operator!=( CudaModuleCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif 
public: StructureType sType = StructureType::eCudaModuleCreateInfoNV; const void * pNext = {}; size_t dataSize = {}; const void * pData = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CudaModuleCreateInfoNV; }; # endif template <> struct CppType { using Type = CudaModuleCreateInfoNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkCustomResolveCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkCustomResolveCreateInfoEXT.html struct CustomResolveCreateInfoEXT { using NativeType = VkCustomResolveCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCustomResolveCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CustomResolveCreateInfoEXT( Bool32 customResolve_ = {}, uint32_t colorAttachmentCount_ = {}, const Format * pColorAttachmentFormats_ = {}, Format depthAttachmentFormat_ = Format::eUndefined, Format stencilAttachmentFormat_ = Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , customResolve{ customResolve_ } , colorAttachmentCount{ colorAttachmentCount_ } , pColorAttachmentFormats{ pColorAttachmentFormats_ } , depthAttachmentFormat{ depthAttachmentFormat_ } , stencilAttachmentFormat{ stencilAttachmentFormat_ } { } VULKAN_HPP_CONSTEXPR CustomResolveCreateInfoEXT( CustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; CustomResolveCreateInfoEXT( VkCustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : CustomResolveCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CustomResolveCreateInfoEXT( Bool32 customResolve_, ArrayProxyNoTemporaries const & colorAttachmentFormats_, Format depthAttachmentFormat_ = Format::eUndefined, Format stencilAttachmentFormat_ = Format::eUndefined, const void * pNext_ = nullptr ) : 
pNext( pNext_ ) , customResolve( customResolve_ ) , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) , pColorAttachmentFormats( colorAttachmentFormats_.data() ) , depthAttachmentFormat( depthAttachmentFormat_ ) , stencilAttachmentFormat( stencilAttachmentFormat_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ CustomResolveCreateInfoEXT & operator=( CustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ CustomResolveCreateInfoEXT & operator=( VkCustomResolveCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setCustomResolve( Bool32 customResolve_ ) & VULKAN_HPP_NOEXCEPT { customResolve = customResolve_; return *this; } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setCustomResolve( Bool32 customResolve_ ) && VULKAN_HPP_NOEXCEPT { customResolve = customResolve_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) & VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setColorAttachmentCount( uint32_t colorAttachmentCount_ ) && VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) & VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setPColorAttachmentFormats( const Format * pColorAttachmentFormats_ ) && VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) CustomResolveCreateInfoEXT & setColorAttachmentFormats( ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); pColorAttachmentFormats = colorAttachmentFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setDepthAttachmentFormat( Format depthAttachmentFormat_ ) & VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setDepthAttachmentFormat( Format depthAttachmentFormat_ ) && VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT & setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) & VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 CustomResolveCreateInfoEXT && setStencilAttachmentFormat( Format stencilAttachmentFormat_ ) && VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkCustomResolveCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCustomResolveCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkCustomResolveCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkCustomResolveCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie( sType, pNext, customResolve, colorAttachmentCount, pColorAttachmentFormats, depthAttachmentFormat, stencilAttachmentFormat ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( CustomResolveCreateInfoEXT const & ) const = default; #else bool operator==( CustomResolveCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customResolve == rhs.customResolve ) && ( colorAttachmentCount == rhs.colorAttachmentCount ) && ( pColorAttachmentFormats == rhs.pColorAttachmentFormats ) && ( depthAttachmentFormat == rhs.depthAttachmentFormat ) && ( stencilAttachmentFormat == rhs.stencilAttachmentFormat ); # endif } bool operator!=( CustomResolveCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eCustomResolveCreateInfoEXT; const void * pNext = {}; Bool32 customResolve = {}; uint32_t colorAttachmentCount = {}; const Format * pColorAttachmentFormats = {}; Format depthAttachmentFormat = Format::eUndefined; Format stencilAttachmentFormat = Format::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = CustomResolveCreateInfoEXT; }; #endif template <> struct CppType { using Type = CustomResolveCreateInfoEXT; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkD3D12FenceSubmitInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkD3D12FenceSubmitInfoKHR.html struct D3D12FenceSubmitInfoKHR { using NativeType = VkD3D12FenceSubmitInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t 
waitSemaphoreValuesCount_ = {}, const uint64_t * pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ } , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } , signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ } , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } { } VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : D3D12FenceSubmitInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) D3D12FenceSubmitInfoKHR( ArrayProxyNoTemporaries const & waitSemaphoreValues_, ArrayProxyNoTemporaries const & signalSemaphoreValues_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , waitSemaphoreValuesCount( static_cast( waitSemaphoreValues_.size() ) ) , pWaitSemaphoreValues( waitSemaphoreValues_.data() ) , signalSemaphoreValuesCount( static_cast( signalSemaphoreValues_.size() ) ) , pSignalSemaphoreValues( signalSemaphoreValues_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & 
setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) & VULKAN_HPP_NOEXCEPT { waitSemaphoreValuesCount = waitSemaphoreValuesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR && setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) && VULKAN_HPP_NOEXCEPT { waitSemaphoreValuesCount = waitSemaphoreValuesCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) & VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR && setPWaitSemaphoreValues( const uint64_t * pWaitSemaphoreValues_ ) && VULKAN_HPP_NOEXCEPT { pWaitSemaphoreValues = pWaitSemaphoreValues_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( ArrayProxyNoTemporaries const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { waitSemaphoreValuesCount = static_cast( waitSemaphoreValues_.size() ); pWaitSemaphoreValues = waitSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) & VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = signalSemaphoreValuesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR && setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) && VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = signalSemaphoreValuesCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) & VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = pSignalSemaphoreValues_; return *this; } VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR && setPSignalSemaphoreValues( const uint64_t * pSignalSemaphoreValues_ ) && VULKAN_HPP_NOEXCEPT { pSignalSemaphoreValues = 
pSignalSemaphoreValues_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( ArrayProxyNoTemporaries const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreValuesCount = static_cast( signalSemaphoreValues_.size() ); pSignalSemaphoreValues = signalSemaphoreValues_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkD3D12FenceSubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkD3D12FenceSubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkD3D12FenceSubmitInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, waitSemaphoreValuesCount, pWaitSemaphoreValues, signalSemaphoreValuesCount, pSignalSemaphoreValues ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( D3D12FenceSubmitInfoKHR const & ) const = default; # else bool operator==( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount ) && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues ) && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount ) && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues ); # endif } bool operator!=( D3D12FenceSubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR; const void * pNext = {}; uint32_t waitSemaphoreValuesCount = {}; const uint64_t * 
pWaitSemaphoreValues = {}; uint32_t signalSemaphoreValuesCount = {}; const uint64_t * pSignalSemaphoreValues = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = D3D12FenceSubmitInfoKHR; }; # endif template <> struct CppType { using Type = D3D12FenceSubmitInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ // wrapper struct for struct VkPhysicalDeviceDataGraphOperationSupportARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphOperationSupportARM.html struct PhysicalDeviceDataGraphOperationSupportARM { using NativeType = VkPhysicalDeviceDataGraphOperationSupportARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM( PhysicalDeviceDataGraphOperationTypeARM operationType_ = PhysicalDeviceDataGraphOperationTypeARM::eSpirvExtendedInstructionSet, std::array const & name_ = {}, uint32_t version_ = {} ) VULKAN_HPP_NOEXCEPT : operationType{ operationType_ } , name{ name_ } , version{ version_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDataGraphOperationSupportARM( VkPhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDataGraphOperationSupportARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceDataGraphOperationSupportARM( PhysicalDeviceDataGraphOperationTypeARM operationType_, std::string const & name_, uint32_t version_ = {} ) : operationType( operationType_ ), version( version_ ) { VULKAN_HPP_ASSERT( name_.size() < VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM ); # if defined( _WIN32 ) strncpy_s( name, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.data(), name_.size() ); # else strncpy( name, name_.data(), std::min( 
VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.size() ) ); # endif } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PhysicalDeviceDataGraphOperationSupportARM & operator=( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDataGraphOperationSupportARM & operator=( VkPhysicalDeviceDataGraphOperationSupportARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM & setOperationType( PhysicalDeviceDataGraphOperationTypeARM operationType_ ) & VULKAN_HPP_NOEXCEPT { operationType = operationType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM && setOperationType( PhysicalDeviceDataGraphOperationTypeARM operationType_ ) && VULKAN_HPP_NOEXCEPT { operationType = operationType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM & setName( std::array name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM && setName( std::array name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceDataGraphOperationSupportARM & setName( std::string const & name_ ) VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( name_.size() < VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM ); # if defined( _WIN32 ) strncpy_s( name, VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.data(), name_.size() ); # else strncpy( name, name_.data(), std::min( VK_MAX_PHYSICAL_DEVICE_DATA_GRAPH_OPERATION_SET_NAME_SIZE_ARM, name_.size() ) ); # endif return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDataGraphOperationSupportARM & setVersion( uint32_t version_ ) & VULKAN_HPP_NOEXCEPT { version = version_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphOperationSupportARM && setVersion( uint32_t version_ ) && VULKAN_HPP_NOEXCEPT { version = version_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDataGraphOperationSupportARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphOperationSupportARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphOperationSupportARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphOperationSupportARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( operationType, name, version ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = operationType <=> rhs.operationType; cmp != 0 ) return cmp; if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = version <=> rhs.version; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( operationType == rhs.operationType ) && ( strcmp( name, rhs.name ) == 0 ) && ( version == rhs.version ); } bool operator!=( PhysicalDeviceDataGraphOperationSupportARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: PhysicalDeviceDataGraphOperationTypeARM operationType = PhysicalDeviceDataGraphOperationTypeARM::eSpirvExtendedInstructionSet; ArrayWrapper1D name = {}; uint32_t version = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDataGraphOperationSupportARM; }; #endif // wrapper struct for struct VkDataGraphPipelineBuiltinModelCreateInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineBuiltinModelCreateInfoQCOM.html struct DataGraphPipelineBuiltinModelCreateInfoQCOM { using NativeType = VkDataGraphPipelineBuiltinModelCreateInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM( const PhysicalDeviceDataGraphOperationSupportARM * pOperation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pOperation{ pOperation_ } { } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineBuiltinModelCreateInfoQCOM( VkDataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineBuiltinModelCreateInfoQCOM( 
*reinterpret_cast( &rhs ) ) { } DataGraphPipelineBuiltinModelCreateInfoQCOM & operator=( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineBuiltinModelCreateInfoQCOM & operator=( VkDataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM & setPOperation( const PhysicalDeviceDataGraphOperationSupportARM * pOperation_ ) & VULKAN_HPP_NOEXCEPT { pOperation = pOperation_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineBuiltinModelCreateInfoQCOM && setPOperation( const PhysicalDeviceDataGraphOperationSupportARM * pOperation_ ) && VULKAN_HPP_NOEXCEPT { pOperation = pOperation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineBuiltinModelCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pOperation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
// NOTE(review): generated vulkan.hpp code, mangled in extraction — angle-bracket template
// arguments have been stripped throughout (reinterpret_cast, CppType, std::tuple, etc.) and the
// original line structure is collapsed; restore token-for-token from the generated header.
// Below: (1) tail of DataGraphPipelineBuiltinModelCreateInfoQCOM — the ==/!= fallback used when
// the spaceship operator is unavailable, its sType/pNext/pOperation members, and its CppType
// specializations; (2) DataGraphPipelineCompilerControlCreateInfoARM, the C++ wrapper over
// VkDataGraphPipelineCompilerControlCreateInfoARM carrying a vendor compiler-option C string
// (pVendorOptions) plus the usual sType/pNext chain members, constructors from the C struct,
// chainable l/r-value setters, and conversions to/from the native Vk type.
// NOTE(review): the hand-written operator<=> on this struct deep-compares pVendorOptions with
// strcmp, guarded only by pointer inequality — not by a null check; strcmp is UB if exactly one
// side is null. operator== uses the same pointer-or-strcmp rule. Confirm callers never mix a
// null and a non-null pVendorOptions.
DataGraphPipelineBuiltinModelCreateInfoQCOM const & ) const = default; #else bool operator==( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pOperation == rhs.pOperation ); # endif } bool operator!=( DataGraphPipelineBuiltinModelCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineBuiltinModelCreateInfoQCOM; const void * pNext = {}; const PhysicalDeviceDataGraphOperationSupportARM * pOperation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineBuiltinModelCreateInfoQCOM; }; #endif template <> struct CppType { using Type = DataGraphPipelineBuiltinModelCreateInfoQCOM; }; // wrapper struct for struct VkDataGraphPipelineCompilerControlCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineCompilerControlCreateInfoARM.html struct DataGraphPipelineCompilerControlCreateInfoARM { using NativeType = VkDataGraphPipelineCompilerControlCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineCompilerControlCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineCompilerControlCreateInfoARM( const char * pVendorOptions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pVendorOptions{ pVendorOptions_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineCompilerControlCreateInfoARM( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineCompilerControlCreateInfoARM( VkDataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT
: DataGraphPipelineCompilerControlCreateInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineCompilerControlCreateInfoARM & operator=( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineCompilerControlCreateInfoARM & operator=( VkDataGraphPipelineCompilerControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM & setPVendorOptions( const char * pVendorOptions_ ) & VULKAN_HPP_NOEXCEPT { pVendorOptions = pVendorOptions_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCompilerControlCreateInfoARM && setPVendorOptions( const char * pVendorOptions_ ) && VULKAN_HPP_NOEXCEPT { pVendorOptions = pVendorOptions_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineCompilerControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineCompilerControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineCompilerControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineCompilerControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pVendorOptions ); } #endif #if defined(
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( pVendorOptions != rhs.pVendorOptions ) if ( auto cmp = strcmp( pVendorOptions, rhs.pVendorOptions ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pVendorOptions == rhs.pVendorOptions ) || ( strcmp( pVendorOptions, rhs.pVendorOptions ) == 0 ) ); } bool operator!=( DataGraphPipelineCompilerControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDataGraphPipelineCompilerControlCreateInfoARM; const void * pNext = {}; const char * pVendorOptions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineCompilerControlCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineCompilerControlCreateInfoARM; }; // wrapper struct for struct VkDataGraphPipelineConstantARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineConstantARM.html struct DataGraphPipelineConstantARM { using NativeType = VkDataGraphPipelineConstantARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineConstantARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantARM( uint32_t id_ = {}, const void * pConstantData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , id{ id_ } ,
// NOTE(review): continuation of DataGraphPipelineConstantARM (ctor initializer list begins on the
// previous line) — wrapper over VkDataGraphPipelineConstantARM: an id plus an opaque, non-owning
// pConstantData pointer. Template arguments in reinterpret_cast / std::tuple / CppType below were
// stripped by extraction and must be restored from the generated header. The second half of this
// span opens DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; note its
// allowDuplicate = true — unlike the other structs here, it may appear more than once in a
// pNext chain (spelled out on the next span's lines).
pConstantData{ pConstantData_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantARM( DataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineConstantARM( VkDataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineConstantARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineConstantARM & operator=( DataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineConstantARM & operator=( VkDataGraphPipelineConstantARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setId( uint32_t id_ ) & VULKAN_HPP_NOEXCEPT { id = id_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM && setId( uint32_t id_ ) && VULKAN_HPP_NOEXCEPT { id = id_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM & setPConstantData( const void * pConstantData_ ) & VULKAN_HPP_NOEXCEPT { pConstantData = pConstantData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantARM && setPConstantData( const void * pConstantData_ ) && VULKAN_HPP_NOEXCEPT { pConstantData = pConstantData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineConstantARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineConstantARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineConstantARM const *() const VULKAN_HPP_NOEXCEPT { return
reinterpret_cast( this ); } operator VkDataGraphPipelineConstantARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, id, pConstantData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineConstantARM const & ) const = default; #else bool operator==( DataGraphPipelineConstantARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( pConstantData == rhs.pConstantData ); # endif } bool operator!=( DataGraphPipelineConstantARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineConstantARM; const void * pNext = {}; uint32_t id = {}; const void * pConstantData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineConstantARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineConstantARM; }; // wrapper struct for struct VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM.html struct DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM { using NativeType = VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( uint32_t dimension_ = {}, uint32_t zeroCount_ = {}, uint32_t groupSize_ = {},
// NOTE(review): body of DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM — wrapper
// over VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM with three plain uint32_t
// members (dimension, zeroCount, groupSize), the standard generated ctor/assignment/setter/
// conversion boilerplate, and defaulted <=>/== (memberwise comparison; no pointer payload here
// beyond pNext, which is compared by address). Stripped template arguments (reinterpret_cast,
// std::tuple, CppType) must be restored from the generated header. The tail of this span opens
// DataGraphPipelineResourceInfoARM (descriptorSet/binding/arrayElement triple).
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dimension{ dimension_ } , zeroCount{ zeroCount_ } , groupSize{ groupSize_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & operator=( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & operator=( VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setDimension( uint32_t dimension_ ) & VULKAN_HPP_NOEXCEPT { dimension = dimension_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM && setDimension( uint32_t dimension_ ) && VULKAN_HPP_NOEXCEPT { dimension = dimension_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setZeroCount( uint32_t zeroCount_ ) &
VULKAN_HPP_NOEXCEPT { zeroCount = zeroCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM && setZeroCount( uint32_t zeroCount_ ) && VULKAN_HPP_NOEXCEPT { zeroCount = zeroCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM & setGroupSize( uint32_t groupSize_ ) & VULKAN_HPP_NOEXCEPT { groupSize = groupSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM && setGroupSize( uint32_t groupSize_ ) && VULKAN_HPP_NOEXCEPT { groupSize = groupSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dimension, zeroCount, groupSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dimension == rhs.dimension ) && ( zeroCount == rhs.zeroCount ) && ( groupSize == rhs.groupSize ); # endif } bool operator!=(
DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; const void * pNext = {}; uint32_t dimension = {}; uint32_t zeroCount = {}; uint32_t groupSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineConstantTensorSemiStructuredSparsityInfoARM; }; // wrapper struct for struct VkDataGraphPipelineResourceInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineResourceInfoARM.html struct DataGraphPipelineResourceInfoARM { using NativeType = VkDataGraphPipelineResourceInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineResourceInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineResourceInfoARM( uint32_t descriptorSet_ = {}, uint32_t binding_ = {}, uint32_t arrayElement_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorSet{ descriptorSet_ } , binding{ binding_ } , arrayElement{ arrayElement_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineResourceInfoARM( DataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineResourceInfoARM( VkDataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineResourceInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineResourceInfoARM & operator=( DataGraphPipelineResourceInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineResourceInfoARM & operator=( VkDataGraphPipelineResourceInfoARM const & rhs )
// NOTE(review): continuation of DataGraphPipelineResourceInfoARM — native-assign from the C
// struct, l/r-value setters for descriptorSet/binding/arrayElement, Vk conversions, reflect(),
// defaulted <=>/== fallback, and members. Stripped template arguments (reinterpret_cast,
// std::tuple, CppType) must be restored from the generated header. The tail opens
// DataGraphPipelineCreateInfoARM (flags/layout + a counted pResourceInfos array).
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setDescriptorSet( uint32_t descriptorSet_ ) & VULKAN_HPP_NOEXCEPT { descriptorSet = descriptorSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM && setDescriptorSet( uint32_t descriptorSet_ ) && VULKAN_HPP_NOEXCEPT { descriptorSet = descriptorSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setBinding( uint32_t binding_ ) & VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM && setBinding( uint32_t binding_ ) && VULKAN_HPP_NOEXCEPT { binding = binding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM & setArrayElement( uint32_t arrayElement_ ) & VULKAN_HPP_NOEXCEPT { arrayElement = arrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineResourceInfoARM && setArrayElement( uint32_t arrayElement_ ) && VULKAN_HPP_NOEXCEPT { arrayElement = arrayElement_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineResourceInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineResourceInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineResourceInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineResourceInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined(
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorSet, binding, arrayElement ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineResourceInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineResourceInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSet == rhs.descriptorSet ) && ( binding == rhs.binding ) && ( arrayElement == rhs.arrayElement ); # endif } bool operator!=( DataGraphPipelineResourceInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineResourceInfoARM; const void * pNext = {}; uint32_t descriptorSet = {}; uint32_t binding = {}; uint32_t arrayElement = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineResourceInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineResourceInfoARM; }; // wrapper struct for struct VkDataGraphPipelineCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineCreateInfoARM.html struct DataGraphPipelineCreateInfoARM { using NativeType = VkDataGraphPipelineCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineCreateInfoARM( PipelineCreateFlags2KHR flags_ = {}, PipelineLayout layout_ = {}, uint32_t resourceInfoCount_ = {}, const DataGraphPipelineResourceInfoARM * pResourceInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , layout{ layout_ } ,
// NOTE(review): body of DataGraphPipelineCreateInfoARM — wrapper over
// VkDataGraphPipelineCreateInfoARM. The enhanced-mode ctor and setResourceInfos derive
// resourceInfoCount from an ArrayProxyNoTemporaries (its element template argument was stripped
// by extraction — presumably DataGraphPipelineResourceInfoARM; confirm against the generated
// header), keeping count and pointer consistent so callers cannot desynchronize them. The
// pointer member is non-owning; operator== compares pResourceInfos by address only, not by array
// contents. The tail opens DataGraphPipelineDispatchInfoARM.
resourceInfoCount{ resourceInfoCount_ } , pResourceInfos{ pResourceInfos_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineCreateInfoARM( DataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineCreateInfoARM( VkDataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineCreateInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineCreateInfoARM( PipelineCreateFlags2KHR flags_, PipelineLayout layout_, ArrayProxyNoTemporaries const & resourceInfos_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , layout( layout_ ) , resourceInfoCount( static_cast( resourceInfos_.size() ) ) , pResourceInfos( resourceInfos_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DataGraphPipelineCreateInfoARM & operator=( DataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineCreateInfoARM & operator=( VkDataGraphPipelineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setFlags( PipelineCreateFlags2KHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM && setFlags( PipelineCreateFlags2KHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; }
VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setResourceInfoCount( uint32_t resourceInfoCount_ ) & VULKAN_HPP_NOEXCEPT { resourceInfoCount = resourceInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM && setResourceInfoCount( uint32_t resourceInfoCount_ ) && VULKAN_HPP_NOEXCEPT { resourceInfoCount = resourceInfoCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM & setPResourceInfos( const DataGraphPipelineResourceInfoARM * pResourceInfos_ ) & VULKAN_HPP_NOEXCEPT { pResourceInfos = pResourceInfos_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineCreateInfoARM && setPResourceInfos( const DataGraphPipelineResourceInfoARM * pResourceInfos_ ) && VULKAN_HPP_NOEXCEPT { pResourceInfos = pResourceInfos_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineCreateInfoARM & setResourceInfos( ArrayProxyNoTemporaries const & resourceInfos_ ) VULKAN_HPP_NOEXCEPT { resourceInfoCount = static_cast( resourceInfos_.size() ); pResourceInfos = resourceInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, layout, resourceInfoCount, pResourceInfos ); } #endif #if defined(
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineCreateInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( layout == rhs.layout ) && ( resourceInfoCount == rhs.resourceInfoCount ) && ( pResourceInfos == rhs.pResourceInfos ); # endif } bool operator!=( DataGraphPipelineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineCreateInfoARM; const void * pNext = {}; PipelineCreateFlags2KHR flags = {}; PipelineLayout layout = {}; uint32_t resourceInfoCount = {}; const DataGraphPipelineResourceInfoARM * pResourceInfos = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineCreateInfoARM; }; // wrapper struct for struct VkDataGraphPipelineDispatchInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineDispatchInfoARM.html struct DataGraphPipelineDispatchInfoARM { using NativeType = VkDataGraphPipelineDispatchInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineDispatchInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineDispatchInfoARM( DataGraphPipelineDispatchFlagsARM flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineDispatchInfoARM( DataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineDispatchInfoARM(
// NOTE(review): continuation of DataGraphPipelineDispatchInfoARM — note its pNext is non-const
// (void *), unlike the create-info structs above, matching the non-const pNext member declared
// at the end of this span. Standard generated boilerplate otherwise; stripped template
// arguments (reinterpret_cast, std::tuple, CppType) must be restored from the generated header.
// The tail opens DataGraphPipelineIdentifierCreateInfoARM (counted pIdentifier byte array).
VkDataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineDispatchInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineDispatchInfoARM & operator=( DataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineDispatchInfoARM & operator=( VkDataGraphPipelineDispatchInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM & setFlags( DataGraphPipelineDispatchFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineDispatchInfoARM && setFlags( DataGraphPipelineDispatchFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineDispatchInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineDispatchInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineDispatchInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineDispatchInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineDispatchInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineDispatchInfoARM
const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( DataGraphPipelineDispatchInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineDispatchInfoARM; void * pNext = {}; DataGraphPipelineDispatchFlagsARM flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineDispatchInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineDispatchInfoARM; }; // wrapper struct for struct VkDataGraphPipelineIdentifierCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineIdentifierCreateInfoARM.html struct DataGraphPipelineIdentifierCreateInfoARM { using NativeType = VkDataGraphPipelineIdentifierCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineIdentifierCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineIdentifierCreateInfoARM( uint32_t identifierSize_ = {}, const uint8_t * pIdentifier_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , identifierSize{ identifierSize_ } , pIdentifier{ pIdentifier_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineIdentifierCreateInfoARM( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineIdentifierCreateInfoARM( VkDataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineIdentifierCreateInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineIdentifierCreateInfoARM( ArrayProxyNoTemporaries const &
// NOTE(review): body of DataGraphPipelineIdentifierCreateInfoARM — wrapper over
// VkDataGraphPipelineIdentifierCreateInfoARM. The enhanced-mode ctor and setIdentifier derive
// identifierSize from an ArrayProxyNoTemporaries (element template argument stripped by
// extraction — presumably uint8_t, matching pIdentifier's type; confirm against the generated
// header). operator== compares pIdentifier by address only, not by the identifierSize bytes it
// points at. The tail opens DataGraphPipelineInfoARM.
identifier_, const void * pNext_ = nullptr ) : pNext( pNext_ ), identifierSize( static_cast( identifier_.size() ) ), pIdentifier( identifier_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DataGraphPipelineIdentifierCreateInfoARM & operator=( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineIdentifierCreateInfoARM & operator=( VkDataGraphPipelineIdentifierCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setIdentifierSize( uint32_t identifierSize_ ) & VULKAN_HPP_NOEXCEPT { identifierSize = identifierSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM && setIdentifierSize( uint32_t identifierSize_ ) && VULKAN_HPP_NOEXCEPT { identifierSize = identifierSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM & setPIdentifier( const uint8_t * pIdentifier_ ) & VULKAN_HPP_NOEXCEPT { pIdentifier = pIdentifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineIdentifierCreateInfoARM && setPIdentifier( const uint8_t * pIdentifier_ ) && VULKAN_HPP_NOEXCEPT { pIdentifier = pIdentifier_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineIdentifierCreateInfoARM & setIdentifier( ArrayProxyNoTemporaries const & identifier_ ) VULKAN_HPP_NOEXCEPT { identifierSize = static_cast( identifier_.size() ); pIdentifier = identifier_.data();
return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineIdentifierCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineIdentifierCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineIdentifierCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineIdentifierCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, identifierSize, pIdentifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineIdentifierCreateInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( identifierSize == rhs.identifierSize ) && ( pIdentifier == rhs.pIdentifier ); # endif } bool operator!=( DataGraphPipelineIdentifierCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineIdentifierCreateInfoARM; const void * pNext = {}; uint32_t identifierSize = {}; const uint8_t * pIdentifier = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineIdentifierCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineIdentifierCreateInfoARM; }; // wrapper struct for struct VkDataGraphPipelineInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineInfoARM.html struct DataGraphPipelineInfoARM { using NativeType = VkDataGraphPipelineInfoARM; static const bool
// NOTE(review): body of DataGraphPipelineInfoARM — wrapper over VkDataGraphPipelineInfoARM,
// holding a single Pipeline handle (dataGraphPipeline) plus sType/pNext, with the standard
// generated ctor/setter/conversion/comparison boilerplate. Stripped template arguments
// (reinterpret_cast, std::tuple, CppType) must be restored from the generated header. The tail
// of this span opens DataGraphPipelinePropertyQueryResultARM, whose definition continues past
// this chunk.
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineInfoARM( Pipeline dataGraphPipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dataGraphPipeline{ dataGraphPipeline_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineInfoARM( DataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineInfoARM( VkDataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineInfoARM & operator=( DataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineInfoARM & operator=( VkDataGraphPipelineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM & setDataGraphPipeline( Pipeline dataGraphPipeline_ ) & VULKAN_HPP_NOEXCEPT { dataGraphPipeline = dataGraphPipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineInfoARM && setDataGraphPipeline( Pipeline dataGraphPipeline_ ) && VULKAN_HPP_NOEXCEPT { dataGraphPipeline = dataGraphPipeline_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator
VkDataGraphPipelineInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dataGraphPipeline ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraphPipeline == rhs.dataGraphPipeline ); # endif } bool operator!=( DataGraphPipelineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineInfoARM; const void * pNext = {}; Pipeline dataGraphPipeline = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineInfoARM; }; // wrapper struct for struct VkDataGraphPipelinePropertyQueryResultARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelinePropertyQueryResultARM.html struct DataGraphPipelinePropertyQueryResultARM { using NativeType = VkDataGraphPipelinePropertyQueryResultARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelinePropertyQueryResultARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyARM property_ = DataGraphPipelinePropertyARM::eCreationLog, Bool32 isText_ = {}, size_t dataSize_ = {}, void * pData_ = {}, void * pNext_ = nullptr )
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , property{ property_ } , isText{ isText_ } , dataSize{ dataSize_ } , pData{ pData_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelinePropertyQueryResultARM( VkDataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelinePropertyQueryResultARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DataGraphPipelinePropertyQueryResultARM( DataGraphPipelinePropertyARM property_, Bool32 isText_, ArrayProxyNoTemporaries const & data_, void * pNext_ = nullptr ) : pNext( pNext_ ), property( property_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DataGraphPipelinePropertyQueryResultARM & operator=( DataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelinePropertyQueryResultARM & operator=( VkDataGraphPipelinePropertyQueryResultARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setProperty( DataGraphPipelinePropertyARM property_ ) & VULKAN_HPP_NOEXCEPT { property = property_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM && setProperty( DataGraphPipelinePropertyARM property_ ) && VULKAN_HPP_NOEXCEPT { property = property_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setIsText( Bool32 isText_ ) & VULKAN_HPP_NOEXCEPT { isText = isText_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM && setIsText( Bool32 isText_ ) && VULKAN_HPP_NOEXCEPT { isText = isText_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setDataSize( size_t dataSize_ ) & VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM && setDataSize( size_t dataSize_ ) && VULKAN_HPP_NOEXCEPT { dataSize = dataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM & setPData( void * pData_ ) & VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelinePropertyQueryResultARM && setPData( void * pData_ ) && VULKAN_HPP_NOEXCEPT { pData = pData_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DataGraphPipelinePropertyQueryResultARM & setData( ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT { dataSize = data_.size() * sizeof( T ); pData = data_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelinePropertyQueryResultARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelinePropertyQueryResultARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelinePropertyQueryResultARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelinePropertyQueryResultARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, property, isText, dataSize, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
DataGraphPipelinePropertyQueryResultARM const & ) const = default; #else bool operator==( DataGraphPipelinePropertyQueryResultARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( property == rhs.property ) && ( isText == rhs.isText ) && ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); # endif } bool operator!=( DataGraphPipelinePropertyQueryResultARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelinePropertyQueryResultARM; void * pNext = {}; DataGraphPipelinePropertyARM property = DataGraphPipelinePropertyARM::eCreationLog; Bool32 isText = {}; size_t dataSize = {}; void * pData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelinePropertyQueryResultARM; }; #endif template <> struct CppType { using Type = DataGraphPipelinePropertyQueryResultARM; }; // wrapper struct for struct VkDataGraphPipelineSessionBindPointRequirementARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionBindPointRequirementARM.html struct DataGraphPipelineSessionBindPointRequirementARM { using NativeType = VkDataGraphPipelineSessionBindPointRequirementARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineSessionBindPointRequirementARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementARM( DataGraphPipelineSessionBindPointARM bindPoint_ = DataGraphPipelineSessionBindPointARM::eTransient, DataGraphPipelineSessionBindPointTypeARM bindPointType_ = DataGraphPipelineSessionBindPointTypeARM::eMemory, uint32_t numObjects_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ 
pNext_ } , bindPoint{ bindPoint_ } , bindPointType{ bindPointType_ } , numObjects{ numObjects_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementARM( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineSessionBindPointRequirementARM( VkDataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineSessionBindPointRequirementARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineSessionBindPointRequirementARM & operator=( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineSessionBindPointRequirementARM & operator=( VkDataGraphPipelineSessionBindPointRequirementARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDataGraphPipelineSessionBindPointRequirementARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, bindPoint, bindPointType, numObjects ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineSessionBindPointRequirementARM const & ) const = default; #else bool operator==( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindPoint == rhs.bindPoint ) && ( bindPointType == 
rhs.bindPointType ) && ( numObjects == rhs.numObjects ); # endif } bool operator!=( DataGraphPipelineSessionBindPointRequirementARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineSessionBindPointRequirementARM; void * pNext = {}; DataGraphPipelineSessionBindPointARM bindPoint = DataGraphPipelineSessionBindPointARM::eTransient; DataGraphPipelineSessionBindPointTypeARM bindPointType = DataGraphPipelineSessionBindPointTypeARM::eMemory; uint32_t numObjects = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineSessionBindPointRequirementARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineSessionBindPointRequirementARM; }; // wrapper struct for struct VkDataGraphPipelineSessionBindPointRequirementsInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionBindPointRequirementsInfoARM.html struct DataGraphPipelineSessionBindPointRequirementsInfoARM { using NativeType = VkDataGraphPipelineSessionBindPointRequirementsInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineSessionBindPointRequirementsInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementsInfoARM( DataGraphPipelineSessionARM session_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , session{ session_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionBindPointRequirementsInfoARM( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineSessionBindPointRequirementsInfoARM( VkDataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineSessionBindPointRequirementsInfoARM( 
*reinterpret_cast( &rhs ) ) { } DataGraphPipelineSessionBindPointRequirementsInfoARM & operator=( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineSessionBindPointRequirementsInfoARM & operator=( VkDataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM & setSession( DataGraphPipelineSessionARM session_ ) & VULKAN_HPP_NOEXCEPT { session = session_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionBindPointRequirementsInfoARM && setSession( DataGraphPipelineSessionARM session_ ) && VULKAN_HPP_NOEXCEPT { session = session_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineSessionBindPointRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, session ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineSessionBindPointRequirementsInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session ); # endif } bool operator!=( DataGraphPipelineSessionBindPointRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineSessionBindPointRequirementsInfoARM; const void * pNext = {}; DataGraphPipelineSessionARM session = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineSessionBindPointRequirementsInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineSessionBindPointRequirementsInfoARM; }; // wrapper struct for struct VkDataGraphPipelineSessionCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionCreateInfoARM.html struct DataGraphPipelineSessionCreateInfoARM { using NativeType = VkDataGraphPipelineSessionCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineSessionCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionCreateInfoARM( DataGraphPipelineSessionCreateFlagsARM flags_ = {}, Pipeline dataGraphPipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , dataGraphPipeline{ dataGraphPipeline_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionCreateInfoARM( DataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineSessionCreateInfoARM( 
VkDataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineSessionCreateInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineSessionCreateInfoARM & operator=( DataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineSessionCreateInfoARM & operator=( VkDataGraphPipelineSessionCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setFlags( DataGraphPipelineSessionCreateFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM && setFlags( DataGraphPipelineSessionCreateFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM & setDataGraphPipeline( Pipeline dataGraphPipeline_ ) & VULKAN_HPP_NOEXCEPT { dataGraphPipeline = dataGraphPipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionCreateInfoARM && setDataGraphPipeline( Pipeline dataGraphPipeline_ ) && VULKAN_HPP_NOEXCEPT { dataGraphPipeline = dataGraphPipeline_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineSessionCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionCreateInfoARM const 
*() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineSessionCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, dataGraphPipeline ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineSessionCreateInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineSessionCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dataGraphPipeline == rhs.dataGraphPipeline ); # endif } bool operator!=( DataGraphPipelineSessionCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineSessionCreateInfoARM; const void * pNext = {}; DataGraphPipelineSessionCreateFlagsARM flags = {}; Pipeline dataGraphPipeline = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineSessionCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineSessionCreateInfoARM; }; // wrapper struct for struct VkDataGraphPipelineSessionMemoryRequirementsInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineSessionMemoryRequirementsInfoARM.html struct DataGraphPipelineSessionMemoryRequirementsInfoARM { using NativeType = VkDataGraphPipelineSessionMemoryRequirementsInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineSessionMemoryRequirementsInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
DataGraphPipelineSessionMemoryRequirementsInfoARM( DataGraphPipelineSessionARM session_ = {}, DataGraphPipelineSessionBindPointARM bindPoint_ = DataGraphPipelineSessionBindPointARM::eTransient, uint32_t objectIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , session{ session_ } , bindPoint{ bindPoint_ } , objectIndex{ objectIndex_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineSessionMemoryRequirementsInfoARM( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineSessionMemoryRequirementsInfoARM( VkDataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphPipelineSessionMemoryRequirementsInfoARM( *reinterpret_cast( &rhs ) ) { } DataGraphPipelineSessionMemoryRequirementsInfoARM & operator=( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineSessionMemoryRequirementsInfoARM & operator=( VkDataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setSession( DataGraphPipelineSessionARM session_ ) & VULKAN_HPP_NOEXCEPT { session = session_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM && setSession( DataGraphPipelineSessionARM session_ ) && VULKAN_HPP_NOEXCEPT { session = session_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DataGraphPipelineSessionMemoryRequirementsInfoARM & setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) & VULKAN_HPP_NOEXCEPT { bindPoint = bindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM && setBindPoint( DataGraphPipelineSessionBindPointARM bindPoint_ ) && VULKAN_HPP_NOEXCEPT { bindPoint = bindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM & setObjectIndex( uint32_t objectIndex_ ) & VULKAN_HPP_NOEXCEPT { objectIndex = objectIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineSessionMemoryRequirementsInfoARM && setObjectIndex( uint32_t objectIndex_ ) && VULKAN_HPP_NOEXCEPT { objectIndex = objectIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphPipelineSessionMemoryRequirementsInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, session, bindPoint, objectIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphPipelineSessionMemoryRequirementsInfoARM const & ) const = default; #else bool operator==( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( session == rhs.session ) && ( bindPoint == rhs.bindPoint ) && ( objectIndex == rhs.objectIndex ); # 
endif } bool operator!=( DataGraphPipelineSessionMemoryRequirementsInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphPipelineSessionMemoryRequirementsInfoARM; const void * pNext = {}; DataGraphPipelineSessionARM session = {}; DataGraphPipelineSessionBindPointARM bindPoint = DataGraphPipelineSessionBindPointARM::eTransient; uint32_t objectIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphPipelineSessionMemoryRequirementsInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineSessionMemoryRequirementsInfoARM; }; // wrapper struct for struct VkDataGraphPipelineShaderModuleCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphPipelineShaderModuleCreateInfoARM.html struct DataGraphPipelineShaderModuleCreateInfoARM { using NativeType = VkDataGraphPipelineShaderModuleCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphPipelineShaderModuleCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphPipelineShaderModuleCreateInfoARM( ShaderModule module_ = {}, const char * pName_ = {}, const SpecializationInfo * pSpecializationInfo_ = {}, uint32_t constantCount_ = {}, const DataGraphPipelineConstantARM * pConstants_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , module{ module_ } , pName{ pName_ } , pSpecializationInfo{ pSpecializationInfo_ } , constantCount{ constantCount_ } , pConstants{ pConstants_ } { } VULKAN_HPP_CONSTEXPR DataGraphPipelineShaderModuleCreateInfoARM( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphPipelineShaderModuleCreateInfoARM( VkDataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT 
: DataGraphPipelineShaderModuleCreateInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineShaderModuleCreateInfoARM( ShaderModule module_, const char * pName_, const SpecializationInfo * pSpecializationInfo_, ArrayProxyNoTemporaries const & constants_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , module( module_ ) , pName( pName_ ) , pSpecializationInfo( pSpecializationInfo_ ) , constantCount( static_cast( constants_.size() ) ) , pConstants( constants_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DataGraphPipelineShaderModuleCreateInfoARM & operator=( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphPipelineShaderModuleCreateInfoARM & operator=( VkDataGraphPipelineShaderModuleCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setModule( ShaderModule module_ ) & VULKAN_HPP_NOEXCEPT { module = module_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setModule( ShaderModule module_ ) && VULKAN_HPP_NOEXCEPT { module = module_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPName( const char * pName_ ) & VULKAN_HPP_NOEXCEPT { pName = pName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setPName( const char * pName_ ) && VULKAN_HPP_NOEXCEPT { pName = pName_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) & VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setPSpecializationInfo( const SpecializationInfo * pSpecializationInfo_ ) && VULKAN_HPP_NOEXCEPT { pSpecializationInfo = pSpecializationInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setConstantCount( uint32_t constantCount_ ) & VULKAN_HPP_NOEXCEPT { constantCount = constantCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setConstantCount( uint32_t constantCount_ ) && VULKAN_HPP_NOEXCEPT { constantCount = constantCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM & setPConstants( const DataGraphPipelineConstantARM * pConstants_ ) & VULKAN_HPP_NOEXCEPT { pConstants = pConstants_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphPipelineShaderModuleCreateInfoARM && setPConstants( const DataGraphPipelineConstantARM * pConstants_ ) && VULKAN_HPP_NOEXCEPT { pConstants = pConstants_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphPipelineShaderModuleCreateInfoARM & setConstants( ArrayProxyNoTemporaries const & constants_ ) VULKAN_HPP_NOEXCEPT { constantCount = static_cast( constants_.size() ); pConstants = constants_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphPipelineShaderModuleCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineShaderModuleCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphPipelineShaderModuleCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkDataGraphPipelineShaderModuleCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, module, pName, pSpecializationInfo, constantCount, pConstants ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = module <=> rhs.module; cmp != 0 ) return cmp; if ( pName != rhs.pName ) if ( auto cmp = strcmp( pName, rhs.pName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = pSpecializationInfo <=> rhs.pSpecializationInfo; cmp != 0 ) return cmp; if ( auto cmp = constantCount <=> rhs.constantCount; cmp != 0 ) return cmp; if ( auto cmp = pConstants <=> rhs.pConstants; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( module == rhs.module ) && ( ( pName == rhs.pName ) || ( strcmp( pName, rhs.pName ) == 0 ) ) && ( pSpecializationInfo == rhs.pSpecializationInfo ) && ( constantCount == rhs.constantCount ) && ( pConstants == rhs.pConstants ); } bool operator!=( DataGraphPipelineShaderModuleCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDataGraphPipelineShaderModuleCreateInfoARM; const void * pNext = {}; ShaderModule module = {}; const char * pName = {}; const SpecializationInfo * pSpecializationInfo = {}; uint32_t constantCount = {}; const DataGraphPipelineConstantARM * pConstants = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
<> struct CppType { using Type = DataGraphPipelineShaderModuleCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphPipelineShaderModuleCreateInfoARM; }; // wrapper struct for struct VkPhysicalDeviceDataGraphProcessingEngineARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphProcessingEngineARM.html struct PhysicalDeviceDataGraphProcessingEngineARM { using NativeType = VkPhysicalDeviceDataGraphProcessingEngineARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphProcessingEngineARM( PhysicalDeviceDataGraphProcessingEngineTypeARM type_ = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault, Bool32 isForeign_ = {} ) VULKAN_HPP_NOEXCEPT : type{ type_ } , isForeign{ isForeign_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphProcessingEngineARM( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDataGraphProcessingEngineARM( VkPhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDataGraphProcessingEngineARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDataGraphProcessingEngineARM & operator=( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDataGraphProcessingEngineARM & operator=( VkPhysicalDeviceDataGraphProcessingEngineARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM & setType( PhysicalDeviceDataGraphProcessingEngineTypeARM type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM && setType( PhysicalDeviceDataGraphProcessingEngineTypeARM type_ ) && VULKAN_HPP_NOEXCEPT { 
type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM & setIsForeign( Bool32 isForeign_ ) & VULKAN_HPP_NOEXCEPT { isForeign = isForeign_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphProcessingEngineARM && setIsForeign( Bool32 isForeign_ ) && VULKAN_HPP_NOEXCEPT { isForeign = isForeign_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDataGraphProcessingEngineARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphProcessingEngineARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphProcessingEngineARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphProcessingEngineARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( type, isForeign ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDataGraphProcessingEngineARM const & ) const = default; #else bool operator==( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( type == rhs.type ) && ( isForeign == rhs.isForeign ); # endif } bool operator!=( PhysicalDeviceDataGraphProcessingEngineARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: PhysicalDeviceDataGraphProcessingEngineTypeARM type = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault; Bool32 isForeign = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDataGraphProcessingEngineARM; }; #endif // wrapper struct for struct VkDataGraphProcessingEngineCreateInfoARM, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDataGraphProcessingEngineCreateInfoARM.html struct DataGraphProcessingEngineCreateInfoARM { using NativeType = VkDataGraphProcessingEngineCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDataGraphProcessingEngineCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DataGraphProcessingEngineCreateInfoARM( uint32_t processingEngineCount_ = {}, PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , processingEngineCount{ processingEngineCount_ } , pProcessingEngines{ pProcessingEngines_ } { } VULKAN_HPP_CONSTEXPR DataGraphProcessingEngineCreateInfoARM( DataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DataGraphProcessingEngineCreateInfoARM( VkDataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DataGraphProcessingEngineCreateInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphProcessingEngineCreateInfoARM( ArrayProxyNoTemporaries const & processingEngines_, const void * pNext_ = nullptr ) : pNext( pNext_ ), processingEngineCount( static_cast( processingEngines_.size() ) ), pProcessingEngines( processingEngines_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DataGraphProcessingEngineCreateInfoARM & operator=( DataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DataGraphProcessingEngineCreateInfoARM & operator=( VkDataGraphProcessingEngineCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM & 
setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM & setProcessingEngineCount( uint32_t processingEngineCount_ ) & VULKAN_HPP_NOEXCEPT { processingEngineCount = processingEngineCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM && setProcessingEngineCount( uint32_t processingEngineCount_ ) && VULKAN_HPP_NOEXCEPT { processingEngineCount = processingEngineCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM & setPProcessingEngines( PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines_ ) & VULKAN_HPP_NOEXCEPT { pProcessingEngines = pProcessingEngines_; return *this; } VULKAN_HPP_CONSTEXPR_14 DataGraphProcessingEngineCreateInfoARM && setPProcessingEngines( PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines_ ) && VULKAN_HPP_NOEXCEPT { pProcessingEngines = pProcessingEngines_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DataGraphProcessingEngineCreateInfoARM & setProcessingEngines( ArrayProxyNoTemporaries const & processingEngines_ ) VULKAN_HPP_NOEXCEPT { processingEngineCount = static_cast( processingEngines_.size() ); pProcessingEngines = processingEngines_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDataGraphProcessingEngineCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphProcessingEngineCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDataGraphProcessingEngineCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDataGraphProcessingEngineCreateInfoARM *() 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, processingEngineCount, pProcessingEngines ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DataGraphProcessingEngineCreateInfoARM const & ) const = default; #else bool operator==( DataGraphProcessingEngineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( processingEngineCount == rhs.processingEngineCount ) && ( pProcessingEngines == rhs.pProcessingEngines ); # endif } bool operator!=( DataGraphProcessingEngineCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDataGraphProcessingEngineCreateInfoARM; const void * pNext = {}; uint32_t processingEngineCount = {}; PhysicalDeviceDataGraphProcessingEngineARM * pProcessingEngines = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DataGraphProcessingEngineCreateInfoARM; }; #endif template <> struct CppType { using Type = DataGraphProcessingEngineCreateInfoARM; }; // wrapper struct for struct VkDebugMarkerMarkerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerMarkerInfoEXT.html struct DebugMarkerMarkerInfoEXT { using NativeType = VkDebugMarkerMarkerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pMarkerName{ pMarkerName_ } , color{ color_ 
} { } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugMarkerMarkerInfoEXT( *reinterpret_cast( &rhs ) ) { } DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setPMarkerName( const char * pMarkerName_ ) & VULKAN_HPP_NOEXCEPT { pMarkerName = pMarkerName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT && setPMarkerName( const char * pMarkerName_ ) && VULKAN_HPP_NOEXCEPT { pMarkerName = pMarkerName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & setColor( std::array color_ ) & VULKAN_HPP_NOEXCEPT { color = color_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT && setColor( std::array color_ ) && VULKAN_HPP_NOEXCEPT { color = color_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugMarkerMarkerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerMarkerInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugMarkerMarkerInfoEXT *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pMarkerName, color ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::partial_ordering operator<=>( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( pMarkerName != rhs.pMarkerName ) if ( auto cmp = strcmp( pMarkerName, rhs.pMarkerName ); cmp != 0 ) return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp; return std::partial_ordering::equivalent; } #endif bool operator==( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pMarkerName == rhs.pMarkerName ) || ( strcmp( pMarkerName, rhs.pMarkerName ) == 0 ) ) && ( color == rhs.color ); } bool operator!=( DebugMarkerMarkerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT; const void * pNext = {}; const char * pMarkerName = {}; ArrayWrapper1D color = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugMarkerMarkerInfoEXT; }; #endif template <> struct CppType { using Type = DebugMarkerMarkerInfoEXT; }; // wrapper struct for struct VkDebugMarkerObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectNameInfoEXT.html struct DebugMarkerObjectNameInfoEXT { using NativeType = VkDebugMarkerObjectNameInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
DebugMarkerObjectNameInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , objectType{ objectType_ } , object{ object_ } , pObjectName{ pObjectName_ } { } VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugMarkerObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) { } DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObjectType( DebugReportObjectTypeEXT objectType_ ) & VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT && setObjectType( DebugReportObjectTypeEXT objectType_ ) && VULKAN_HPP_NOEXCEPT { objectType = objectType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) & VULKAN_HPP_NOEXCEPT { object = object_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT && setObject( uint64_t object_ ) && VULKAN_HPP_NOEXCEPT { object = object_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DebugMarkerObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) & VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT && setPObjectName( const char * pObjectName_ ) && VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugMarkerObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugMarkerObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, objectType, object, pObjectName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp; if ( auto cmp = object <=> rhs.object; cmp != 0 ) return cmp; if ( pObjectName != rhs.pObjectName ) if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); } bool operator!=( DebugMarkerObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT; const void * pNext = {}; DebugReportObjectTypeEXT objectType = DebugReportObjectTypeEXT::eUnknown; uint64_t object = {}; const char * pObjectName = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugMarkerObjectNameInfoEXT; }; #endif template <> struct CppType { using Type = DebugMarkerObjectNameInfoEXT; }; // wrapper struct for struct VkDebugMarkerObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugMarkerObjectTagInfoEXT.html struct DebugMarkerObjectTagInfoEXT { using NativeType = VkDebugMarkerObjectTagInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_ = DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , objectType{ objectType_ } , object{ object_ } , tagName{ tagName_ } , tagSize{ tagSize_ } , pTag{ pTag_ } { } VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugMarkerObjectTagInfoEXT( 
VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugMarkerObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DebugMarkerObjectTagInfoEXT( DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, ArrayProxyNoTemporaries const & tag_, const void * pNext_ = nullptr ) : pNext( pNext_ ), objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof( T ) ), pTag( tag_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObjectType( DebugReportObjectTypeEXT objectType_ ) & VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setObjectType( DebugReportObjectTypeEXT objectType_ ) && VULKAN_HPP_NOEXCEPT { objectType = objectType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) & VULKAN_HPP_NOEXCEPT { object = object_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setObject( uint64_t object_ ) && VULKAN_HPP_NOEXCEPT { object = object_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) & 
VULKAN_HPP_NOEXCEPT { tagName = tagName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setTagName( uint64_t tagName_ ) && VULKAN_HPP_NOEXCEPT { tagName = tagName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) & VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setTagSize( size_t tagSize_ ) && VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & setPTag( const void * pTag_ ) & VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT && setPTag( const void * pTag_ ) && VULKAN_HPP_NOEXCEPT { pTag = pTag_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DebugMarkerObjectTagInfoEXT & setTag( ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { tagSize = tag_.size() * sizeof( T ); pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugMarkerObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugMarkerObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugMarkerObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, objectType, object, tagName, tagSize, pTag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DebugMarkerObjectTagInfoEXT const & ) const = default; #else bool operator==( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() 
== rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( object == rhs.object ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); # endif } bool operator!=( DebugMarkerObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT; const void * pNext = {}; DebugReportObjectTypeEXT objectType = DebugReportObjectTypeEXT::eUnknown; uint64_t object = {}; uint64_t tagName = {}; size_t tagSize = {}; const void * pTag = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugMarkerObjectTagInfoEXT; }; #endif template <> struct CppType { using Type = DebugMarkerObjectTagInfoEXT; }; typedef Bool32( VKAPI_PTR * PFN_DebugReportCallbackEXT )( DebugReportFlagsEXT flags, DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage, void * pUserData ); // wrapper struct for struct VkDebugReportCallbackCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugReportCallbackCreateInfoEXT.html struct DebugReportCallbackCreateInfoEXT { using NativeType = VkDebugReportCallbackCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_ = {}, PFN_DebugReportCallbackEXT pfnCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pfnCallback{ pfnCallback_ } , pUserData{ pUserData_ } { } VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugReportCallbackCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) DebugReportCallbackCreateInfoEXT( DebugReportFlagsEXT flags_, PFN_vkDebugReportCallbackEXT pfnCallback_, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast( pfnCallback_ ), pUserData_, pNext_ ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setFlags( DebugReportFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT && setFlags( DebugReportFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_DebugReportCallbackEXT pfnCallback_ ) & VULKAN_HPP_NOEXCEPT { pfnCallback = pfnCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT && setPfnCallback( PFN_DebugReportCallbackEXT pfnCallback_ ) && VULKAN_HPP_NOEXCEPT { pfnCallback = pfnCallback_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPUserData( void * pUserData_ ) & VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT && setPUserData( void * pUserData_ ) && VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return std::move( *this ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT { return setPfnCallback( reinterpret_cast( pfnCallback_ ) ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugReportCallbackCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugReportCallbackCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pfnCallback, pUserData ); } #endif bool operator==( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnCallback == rhs.pfnCallback ) && ( pUserData == rhs.pUserData ); #endif } bool operator!=( DebugReportCallbackCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; const void * pNext = {}; DebugReportFlagsEXT flags = {}; PFN_DebugReportCallbackEXT pfnCallback = {}; void * pUserData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugReportCallbackCreateInfoEXT; }; #endif template <> struct CppType { using Type = DebugReportCallbackCreateInfoEXT; }; // wrapper struct for struct VkDebugUtilsLabelEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsLabelEXT.html struct DebugUtilsLabelEXT { using NativeType = VkDebugUtilsLabelEXT; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pLabelName{ pLabelName_ } , color{ color_ } { } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsLabelEXT( *reinterpret_cast( &rhs ) ) {} DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setPLabelName( const char * pLabelName_ ) & VULKAN_HPP_NOEXCEPT { pLabelName = pLabelName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT && setPLabelName( const char * pLabelName_ ) && VULKAN_HPP_NOEXCEPT { pLabelName = pLabelName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & setColor( std::array color_ ) & VULKAN_HPP_NOEXCEPT { color = color_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT && setColor( std::array color_ ) && VULKAN_HPP_NOEXCEPT { color = color_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugUtilsLabelEXT const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsLabelEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugUtilsLabelEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pLabelName, color ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::partial_ordering operator<=>( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( pLabelName != rhs.pLabelName ) if ( auto cmp = strcmp( pLabelName, rhs.pLabelName ); cmp != 0 ) return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; if ( auto cmp = color <=> rhs.color; cmp != 0 ) return cmp; return std::partial_ordering::equivalent; } #endif bool operator==( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ( pLabelName == rhs.pLabelName ) || ( strcmp( pLabelName, rhs.pLabelName ) == 0 ) ) && ( color == rhs.color ); } bool operator!=( DebugUtilsLabelEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugUtilsLabelEXT; const void * pNext = {}; const char * pLabelName = {}; ArrayWrapper1D color = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugUtilsLabelEXT; }; #endif template <> struct CppType { using Type = DebugUtilsLabelEXT; }; // wrapper struct for struct VkDebugUtilsObjectNameInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectNameInfoEXT.html struct DebugUtilsObjectNameInfoEXT { using NativeType = VkDebugUtilsObjectNameInfoEXT; static const 
bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , objectType{ objectType_ } , objectHandle{ objectHandle_ } , pObjectName{ pObjectName_ } { } VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsObjectNameInfoEXT( *reinterpret_cast( &rhs ) ) { } DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectType( ObjectType objectType_ ) & VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT && setObjectType( ObjectType objectType_ ) && VULKAN_HPP_NOEXCEPT { objectType = objectType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) & VULKAN_HPP_NOEXCEPT { objectHandle = 
objectHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT && setObjectHandle( uint64_t objectHandle_ ) && VULKAN_HPP_NOEXCEPT { objectHandle = objectHandle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & setPObjectName( const char * pObjectName_ ) & VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT && setPObjectName( const char * pObjectName_ ) && VULKAN_HPP_NOEXCEPT { pObjectName = pObjectName_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugUtilsObjectNameInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsObjectNameInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugUtilsObjectNameInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, objectType, objectHandle, pObjectName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = objectType <=> rhs.objectType; cmp != 0 ) return cmp; if ( auto cmp = objectHandle <=> rhs.objectHandle; cmp != 0 ) return cmp; if ( pObjectName != rhs.pObjectName ) if ( auto cmp = strcmp( pObjectName, rhs.pObjectName ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( ( pObjectName == rhs.pObjectName ) || ( strcmp( pObjectName, rhs.pObjectName ) == 0 ) ); } bool operator!=( DebugUtilsObjectNameInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT; const void * pNext = {}; ObjectType objectType = ObjectType::eUnknown; uint64_t objectHandle = {}; const char * pObjectName = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugUtilsObjectNameInfoEXT; }; #endif template <> struct CppType { using Type = DebugUtilsObjectNameInfoEXT; }; // wrapper struct for struct VkDebugUtilsMessengerCallbackDataEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCallbackDataEXT.html struct DebugUtilsMessengerCallbackDataEXT { using NativeType = VkDebugUtilsMessengerCallbackDataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char * pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char * pMessage_ = {}, uint32_t queueLabelCount_ = {}, const DebugUtilsLabelEXT * pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const DebugUtilsLabelEXT * pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const DebugUtilsObjectNameInfoEXT * pObjects_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ 
} , pMessageIdName{ pMessageIdName_ } , messageIdNumber{ messageIdNumber_ } , pMessage{ pMessage_ } , queueLabelCount{ queueLabelCount_ } , pQueueLabels{ pQueueLabels_ } , cmdBufLabelCount{ cmdBufLabelCount_ } , pCmdBufLabels{ pCmdBufLabels_ } , objectCount{ objectCount_ } , pObjects{ pObjects_ } { } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char * pMessageIdName_, int32_t messageIdNumber_, const char * pMessage_, ArrayProxyNoTemporaries const & queueLabels_, ArrayProxyNoTemporaries const & cmdBufLabels_ = {}, ArrayProxyNoTemporaries const & objects_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , pMessageIdName( pMessageIdName_ ) , messageIdNumber( messageIdNumber_ ) , pMessage( pMessage_ ) , queueLabelCount( static_cast( queueLabels_.size() ) ) , pQueueLabels( queueLabels_.data() ) , cmdBufLabelCount( static_cast( cmdBufLabels_.size() ) ) , pCmdBufLabels( cmdBufLabels_.data() ) , objectCount( static_cast( objects_.size() ) ) , pObjects( objects_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setFlags( DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char * pMessageIdName_ ) & VULKAN_HPP_NOEXCEPT { pMessageIdName = pMessageIdName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPMessageIdName( const char * pMessageIdName_ ) && VULKAN_HPP_NOEXCEPT { pMessageIdName = pMessageIdName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) & VULKAN_HPP_NOEXCEPT { messageIdNumber = messageIdNumber_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setMessageIdNumber( int32_t messageIdNumber_ ) && VULKAN_HPP_NOEXCEPT { messageIdNumber = messageIdNumber_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPMessage( const char * pMessage_ ) & VULKAN_HPP_NOEXCEPT { pMessage = pMessage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPMessage( const char * pMessage_ ) && VULKAN_HPP_NOEXCEPT { pMessage = pMessage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) & VULKAN_HPP_NOEXCEPT { queueLabelCount = queueLabelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setQueueLabelCount( uint32_t queueLabelCount_ ) && VULKAN_HPP_NOEXCEPT { 
queueLabelCount = queueLabelCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const DebugUtilsLabelEXT * pQueueLabels_ ) & VULKAN_HPP_NOEXCEPT { pQueueLabels = pQueueLabels_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPQueueLabels( const DebugUtilsLabelEXT * pQueueLabels_ ) && VULKAN_HPP_NOEXCEPT { pQueueLabels = pQueueLabels_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DebugUtilsMessengerCallbackDataEXT & setQueueLabels( ArrayProxyNoTemporaries const & queueLabels_ ) VULKAN_HPP_NOEXCEPT { queueLabelCount = static_cast( queueLabels_.size() ); pQueueLabels = queueLabels_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) & VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = cmdBufLabelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) && VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = cmdBufLabelCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const DebugUtilsLabelEXT * pCmdBufLabels_ ) & VULKAN_HPP_NOEXCEPT { pCmdBufLabels = pCmdBufLabels_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPCmdBufLabels( const DebugUtilsLabelEXT * pCmdBufLabels_ ) && VULKAN_HPP_NOEXCEPT { pCmdBufLabels = pCmdBufLabels_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( ArrayProxyNoTemporaries const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT { cmdBufLabelCount = static_cast( cmdBufLabels_.size() ); pCmdBufLabels = cmdBufLabels_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t 
objectCount_ ) & VULKAN_HPP_NOEXCEPT { objectCount = objectCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setObjectCount( uint32_t objectCount_ ) && VULKAN_HPP_NOEXCEPT { objectCount = objectCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & setPObjects( const DebugUtilsObjectNameInfoEXT * pObjects_ ) & VULKAN_HPP_NOEXCEPT { pObjects = pObjects_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT && setPObjects( const DebugUtilsObjectNameInfoEXT * pObjects_ ) && VULKAN_HPP_NOEXCEPT { pObjects = pObjects_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DebugUtilsMessengerCallbackDataEXT & setObjects( ArrayProxyNoTemporaries const & objects_ ) VULKAN_HPP_NOEXCEPT { objectCount = static_cast( objects_.size() ); pObjects = objects_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugUtilsMessengerCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugUtilsMessengerCallbackDataEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pMessageIdName, messageIdNumber, pMessage, queueLabelCount, pQueueLabels, cmdBufLabelCount, pCmdBufLabels, objectCount, pObjects ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = 
flags <=> rhs.flags; cmp != 0 ) return cmp; if ( pMessageIdName != rhs.pMessageIdName ) if ( auto cmp = strcmp( pMessageIdName, rhs.pMessageIdName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = messageIdNumber <=> rhs.messageIdNumber; cmp != 0 ) return cmp; if ( pMessage != rhs.pMessage ) if ( auto cmp = strcmp( pMessage, rhs.pMessage ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = queueLabelCount <=> rhs.queueLabelCount; cmp != 0 ) return cmp; if ( auto cmp = pQueueLabels <=> rhs.pQueueLabels; cmp != 0 ) return cmp; if ( auto cmp = cmdBufLabelCount <=> rhs.cmdBufLabelCount; cmp != 0 ) return cmp; if ( auto cmp = pCmdBufLabels <=> rhs.pCmdBufLabels; cmp != 0 ) return cmp; if ( auto cmp = objectCount <=> rhs.objectCount; cmp != 0 ) return cmp; if ( auto cmp = pObjects <=> rhs.pObjects; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( ( pMessageIdName == rhs.pMessageIdName ) || ( strcmp( pMessageIdName, rhs.pMessageIdName ) == 0 ) ) && ( messageIdNumber == rhs.messageIdNumber ) && ( ( pMessage == rhs.pMessage ) || ( strcmp( pMessage, rhs.pMessage ) == 0 ) ) && ( queueLabelCount == rhs.queueLabelCount ) && ( pQueueLabels == rhs.pQueueLabels ) && ( cmdBufLabelCount == rhs.cmdBufLabelCount ) && ( pCmdBufLabels == rhs.pCmdBufLabels ) && ( objectCount == rhs.objectCount ) && ( pObjects == rhs.pObjects ); } bool operator!=( DebugUtilsMessengerCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT; const void * pNext = {}; DebugUtilsMessengerCallbackDataFlagsEXT flags = {}; const char * pMessageIdName = {}; int32_t messageIdNumber = {}; 
const char * pMessage = {}; uint32_t queueLabelCount = {}; const DebugUtilsLabelEXT * pQueueLabels = {}; uint32_t cmdBufLabelCount = {}; const DebugUtilsLabelEXT * pCmdBufLabels = {}; uint32_t objectCount = {}; const DebugUtilsObjectNameInfoEXT * pObjects = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugUtilsMessengerCallbackDataEXT; }; #endif template <> struct CppType { using Type = DebugUtilsMessengerCallbackDataEXT; }; typedef Bool32( VKAPI_PTR * PFN_DebugUtilsMessengerCallbackEXT )( DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT * pCallbackData, void * pUserData ); // wrapper struct for struct VkDebugUtilsMessengerCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsMessengerCreateInfoEXT.html struct DebugUtilsMessengerCreateInfoEXT { using NativeType = VkDebugUtilsMessengerCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_ = {}, DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , messageSeverity{ messageSeverity_ } , messageType{ messageType_ } , pfnUserCallback{ pfnUserCallback_ } , pUserData{ pUserData_ } { } VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : 
DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateFlagsEXT flags_, DebugUtilsMessageSeverityFlagsEXT messageSeverity_, DebugUtilsMessageTypeFlagsEXT messageType_, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : DebugUtilsMessengerCreateInfoEXT( flags_, messageSeverity_, messageType_, reinterpret_cast( pfnUserCallback_ ), pUserData_, pNext_ ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setFlags( DebugUtilsMessengerCreateFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = 
flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) & VULKAN_HPP_NOEXCEPT { messageSeverity = messageSeverity_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setMessageSeverity( DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) && VULKAN_HPP_NOEXCEPT { messageSeverity = messageSeverity_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ ) & VULKAN_HPP_NOEXCEPT { messageType = messageType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setMessageType( DebugUtilsMessageTypeFlagsEXT messageType_ ) && VULKAN_HPP_NOEXCEPT { messageType = messageType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) & VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setPfnUserCallback( PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) && VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPUserData( void * pUserData_ ) & VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT && setPUserData( void * pUserData_ ) && VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return std::move( *this ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsMessengerCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugUtilsMessengerCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, messageSeverity, messageType, pfnUserCallback, pUserData ); } #endif bool operator==( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( messageSeverity == rhs.messageSeverity ) && ( messageType == rhs.messageType ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); #endif } bool operator!=( DebugUtilsMessengerCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; const void * pNext = {}; DebugUtilsMessengerCreateFlagsEXT flags = {}; DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; DebugUtilsMessageTypeFlagsEXT messageType = {}; PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {}; void * pUserData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugUtilsMessengerCreateInfoEXT; }; #endif template <> struct CppType 
{ using Type = DebugUtilsMessengerCreateInfoEXT; }; // wrapper struct for struct VkDebugUtilsObjectTagInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDebugUtilsObjectTagInfoEXT.html struct DebugUtilsObjectTagInfoEXT { using NativeType = VkDebugUtilsObjectTagInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , objectType{ objectType_ } , objectHandle{ objectHandle_ } , tagName{ tagName_ } , tagSize{ tagSize_ } , pTag{ pTag_ } { } VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DebugUtilsObjectTagInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DebugUtilsObjectTagInfoEXT( ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, ArrayProxyNoTemporaries const & tag_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , objectType( objectType_ ) , objectHandle( objectHandle_ ) , tagName( tagName_ ) , tagSize( tag_.size() * sizeof( T ) ) , pTag( tag_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectType( ObjectType objectType_ ) & VULKAN_HPP_NOEXCEPT { objectType = objectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setObjectType( ObjectType objectType_ ) && VULKAN_HPP_NOEXCEPT { objectType = objectType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) & VULKAN_HPP_NOEXCEPT { objectHandle = objectHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setObjectHandle( uint64_t objectHandle_ ) && VULKAN_HPP_NOEXCEPT { objectHandle = objectHandle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) & VULKAN_HPP_NOEXCEPT { tagName = tagName_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setTagName( uint64_t tagName_ ) && VULKAN_HPP_NOEXCEPT { tagName = tagName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) & VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setTagSize( size_t tagSize_ ) && VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & setPTag( const void * pTag_ ) & VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT && setPTag( const void * pTag_ ) && VULKAN_HPP_NOEXCEPT { pTag = pTag_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template DebugUtilsObjectTagInfoEXT & setTag( 
ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { tagSize = tag_.size() * sizeof( T ); pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDebugUtilsObjectTagInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDebugUtilsObjectTagInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDebugUtilsObjectTagInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, objectType, objectHandle, tagName, tagSize, pTag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DebugUtilsObjectTagInfoEXT const & ) const = default; #else bool operator==( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( tagName == rhs.tagName ) && ( tagSize == rhs.tagSize ) && ( pTag == rhs.pTag ); # endif } bool operator!=( DebugUtilsObjectTagInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT; const void * pNext = {}; ObjectType objectType = ObjectType::eUnknown; uint64_t objectHandle = {}; uint64_t tagName = {}; size_t tagSize = {}; const void * pTag = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DebugUtilsObjectTagInfoEXT; }; #endif template <> struct CppType { using Type = DebugUtilsObjectTagInfoEXT; }; // wrapper struct for struct VkDecompressMemoryRegionEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionEXT.html struct DecompressMemoryRegionEXT { using NativeType = VkDecompressMemoryRegionEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DecompressMemoryRegionEXT( DeviceAddress srcAddress_ = {}, DeviceAddress dstAddress_ = {}, DeviceSize compressedSize_ = {}, DeviceSize decompressedSize_ = {} ) VULKAN_HPP_NOEXCEPT : srcAddress{ srcAddress_ } , dstAddress{ dstAddress_ } , compressedSize{ compressedSize_ } , decompressedSize{ decompressedSize_ } { } VULKAN_HPP_CONSTEXPR DecompressMemoryRegionEXT( DecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DecompressMemoryRegionEXT( VkDecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DecompressMemoryRegionEXT( *reinterpret_cast( &rhs ) ) { } DecompressMemoryRegionEXT & operator=( DecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DecompressMemoryRegionEXT & operator=( VkDecompressMemoryRegionEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setSrcAddress( DeviceAddress srcAddress_ ) & VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT && setSrcAddress( DeviceAddress srcAddress_ ) && VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setDstAddress( DeviceAddress dstAddress_ ) & VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT && setDstAddress( DeviceAddress dstAddress_ ) && VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & 
setCompressedSize( DeviceSize compressedSize_ ) & VULKAN_HPP_NOEXCEPT { compressedSize = compressedSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT && setCompressedSize( DeviceSize compressedSize_ ) && VULKAN_HPP_NOEXCEPT { compressedSize = compressedSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT & setDecompressedSize( DeviceSize decompressedSize_ ) & VULKAN_HPP_NOEXCEPT { decompressedSize = decompressedSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionEXT && setDecompressedSize( DeviceSize decompressedSize_ ) && VULKAN_HPP_NOEXCEPT { decompressedSize = decompressedSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDecompressMemoryRegionEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDecompressMemoryRegionEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDecompressMemoryRegionEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDecompressMemoryRegionEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DecompressMemoryRegionEXT const & ) const = default; #else bool operator==( DecompressMemoryRegionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) && ( decompressedSize == rhs.decompressedSize ); # endif } bool operator!=( DecompressMemoryRegionEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress srcAddress = {}; DeviceAddress dstAddress = {}; DeviceSize 
compressedSize = {}; DeviceSize decompressedSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DecompressMemoryRegionEXT; }; #endif // wrapper struct for struct VkDecompressMemoryInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryInfoEXT.html struct DecompressMemoryInfoEXT { using NativeType = VkDecompressMemoryInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDecompressMemoryInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DecompressMemoryInfoEXT( MemoryDecompressionMethodFlagsEXT decompressionMethod_ = {}, uint32_t regionCount_ = {}, const DecompressMemoryRegionEXT * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , decompressionMethod{ decompressionMethod_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR DecompressMemoryInfoEXT( DecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DecompressMemoryInfoEXT( VkDecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DecompressMemoryInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DecompressMemoryInfoEXT( MemoryDecompressionMethodFlagsEXT decompressionMethod_, ArrayProxyNoTemporaries const & regions_, const void * pNext_ = nullptr ) : pNext( pNext_ ), decompressionMethod( decompressionMethod_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DecompressMemoryInfoEXT & operator=( DecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DecompressMemoryInfoEXT & operator=( VkDecompressMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setDecompressionMethod( MemoryDecompressionMethodFlagsEXT decompressionMethod_ ) & VULKAN_HPP_NOEXCEPT { decompressionMethod = decompressionMethod_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT && setDecompressionMethod( MemoryDecompressionMethodFlagsEXT decompressionMethod_ ) && VULKAN_HPP_NOEXCEPT { decompressionMethod = decompressionMethod_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT & setPRegions( const DecompressMemoryRegionEXT * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryInfoEXT && setPRegions( const DecompressMemoryRegionEXT * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DecompressMemoryInfoEXT & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDecompressMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDecompressMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDecompressMemoryInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDecompressMemoryInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, decompressionMethod, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DecompressMemoryInfoEXT const & ) const = default; #else bool operator==( DecompressMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethod == rhs.decompressionMethod ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( DecompressMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDecompressMemoryInfoEXT; const void * pNext = {}; MemoryDecompressionMethodFlagsEXT decompressionMethod = {}; uint32_t regionCount = {}; const DecompressMemoryRegionEXT * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DecompressMemoryInfoEXT; }; #endif template <> struct CppType { using Type = DecompressMemoryInfoEXT; }; // wrapper struct for struct VkDecompressMemoryRegionNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDecompressMemoryRegionNV.html struct DecompressMemoryRegionNV { using NativeType = VkDecompressMemoryRegionNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DeviceAddress srcAddress_ = {}, DeviceAddress dstAddress_ = {}, DeviceSize compressedSize_ = {}, DeviceSize decompressedSize_ = {}, MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT : srcAddress{ srcAddress_ } 
, dstAddress{ dstAddress_ } , compressedSize{ compressedSize_ } , decompressedSize{ decompressedSize_ } , decompressionMethod{ decompressionMethod_ } { } VULKAN_HPP_CONSTEXPR DecompressMemoryRegionNV( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DecompressMemoryRegionNV( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT : DecompressMemoryRegionNV( *reinterpret_cast( &rhs ) ) { } DecompressMemoryRegionNV & operator=( DecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DecompressMemoryRegionNV & operator=( VkDecompressMemoryRegionNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setSrcAddress( DeviceAddress srcAddress_ ) & VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV && setSrcAddress( DeviceAddress srcAddress_ ) && VULKAN_HPP_NOEXCEPT { srcAddress = srcAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDstAddress( DeviceAddress dstAddress_ ) & VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV && setDstAddress( DeviceAddress dstAddress_ ) && VULKAN_HPP_NOEXCEPT { dstAddress = dstAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setCompressedSize( DeviceSize compressedSize_ ) & VULKAN_HPP_NOEXCEPT { compressedSize = compressedSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV && setCompressedSize( DeviceSize compressedSize_ ) && VULKAN_HPP_NOEXCEPT { compressedSize = compressedSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressedSize( DeviceSize decompressedSize_ ) & VULKAN_HPP_NOEXCEPT { decompressedSize = decompressedSize_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV && setDecompressedSize( DeviceSize decompressedSize_ ) && VULKAN_HPP_NOEXCEPT { decompressedSize = decompressedSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV & setDecompressionMethod( MemoryDecompressionMethodFlagsNV decompressionMethod_ ) & VULKAN_HPP_NOEXCEPT { decompressionMethod = decompressionMethod_; return *this; } VULKAN_HPP_CONSTEXPR_14 DecompressMemoryRegionNV && setDecompressionMethod( MemoryDecompressionMethodFlagsNV decompressionMethod_ ) && VULKAN_HPP_NOEXCEPT { decompressionMethod = decompressionMethod_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDecompressMemoryRegionNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDecompressMemoryRegionNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDecompressMemoryRegionNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDecompressMemoryRegionNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcAddress, dstAddress, compressedSize, decompressedSize, decompressionMethod ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DecompressMemoryRegionNV const & ) const = default; #else bool operator==( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcAddress == rhs.srcAddress ) && ( dstAddress == rhs.dstAddress ) && ( compressedSize == rhs.compressedSize ) && ( decompressedSize == rhs.decompressedSize ) && ( decompressionMethod == rhs.decompressionMethod ); # endif } bool operator!=( DecompressMemoryRegionNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress srcAddress = {}; DeviceAddress 
dstAddress = {}; DeviceSize compressedSize = {}; DeviceSize decompressedSize = {}; MemoryDecompressionMethodFlagsNV decompressionMethod = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DecompressMemoryRegionNV; }; #endif // wrapper struct for struct VkDedicatedAllocationBufferCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationBufferCreateInfoNV.html struct DedicatedAllocationBufferCreateInfoNV { using NativeType = VkDedicatedAllocationBufferCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dedicatedAllocation{ dedicatedAllocation_ } { } VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast( &rhs ) ) { } DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV && 
setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( Bool32 dedicatedAllocation_ ) & VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV && setDedicatedAllocation( Bool32 dedicatedAllocation_ ) && VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDedicatedAllocationBufferCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationBufferCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDedicatedAllocationBufferCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DedicatedAllocationBufferCreateInfoNV const & ) const = default; #else bool operator==( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); # endif } bool operator!=( DedicatedAllocationBufferCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; const void * pNext = {}; Bool32 dedicatedAllocation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
DedicatedAllocationBufferCreateInfoNV; }; #endif template <> struct CppType { using Type = DedicatedAllocationBufferCreateInfoNV; }; // wrapper struct for struct VkDedicatedAllocationImageCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationImageCreateInfoNV.html struct DedicatedAllocationImageCreateInfoNV { using NativeType = VkDedicatedAllocationImageCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dedicatedAllocation{ dedicatedAllocation_ } { } VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast( &rhs ) ) { } DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( Bool32 dedicatedAllocation_ ) & VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV && setDedicatedAllocation( Bool32 dedicatedAllocation_ ) && VULKAN_HPP_NOEXCEPT { dedicatedAllocation = dedicatedAllocation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDedicatedAllocationImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDedicatedAllocationImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DedicatedAllocationImageCreateInfoNV const & ) const = default; #else bool operator==( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocation == rhs.dedicatedAllocation ); # endif } bool operator!=( DedicatedAllocationImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; const void * pNext = {}; Bool32 dedicatedAllocation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DedicatedAllocationImageCreateInfoNV; }; #endif template <> struct CppType { using Type = DedicatedAllocationImageCreateInfoNV; }; // wrapper struct for struct 
VkDedicatedAllocationMemoryAllocateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html struct DedicatedAllocationMemoryAllocateInfoNV { using NativeType = VkDedicatedAllocationMemoryAllocateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( Image image_ = {}, Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) { } DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDedicatedAllocationMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDedicatedAllocationMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDedicatedAllocationMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const & ) const = default; #else bool operator==( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); # endif } bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV; const void * pNext = {}; Image image = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DedicatedAllocationMemoryAllocateInfoNV; }; #endif 
template <> struct CppType { using Type = DedicatedAllocationMemoryAllocateInfoNV; }; // wrapper struct for struct VkMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier2.html struct MemoryBarrier2 { using NativeType = VkMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryBarrier2( PipelineStageFlags2 srcStageMask_ = {}, AccessFlags2 srcAccessMask_ = {}, PipelineStageFlags2 dstStageMask_ = {}, AccessFlags2 dstAccessMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcStageMask{ srcStageMask_ } , srcAccessMask{ srcAccessMask_ } , dstStageMask{ dstStageMask_ } , dstAccessMask{ dstAccessMask_ } { } VULKAN_HPP_CONSTEXPR MemoryBarrier2( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier2( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrier2( *reinterpret_cast( &rhs ) ) {} MemoryBarrier2 & operator=( MemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryBarrier2 & operator=( VkMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) & VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 && setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) && 
VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 && setSrcAccessMask( AccessFlags2 srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) & VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 && setDstStageMask( PipelineStageFlags2 dstStageMask_ ) && VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier2 && setDstAccessMask( AccessFlags2 dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryBarrier2 const & ) const = default; #else bool operator==( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) 
&& ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ); # endif } bool operator!=( MemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryBarrier2; const void * pNext = {}; PipelineStageFlags2 srcStageMask = {}; AccessFlags2 srcAccessMask = {}; PipelineStageFlags2 dstStageMask = {}; AccessFlags2 dstAccessMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryBarrier2; }; #endif template <> struct CppType { using Type = MemoryBarrier2; }; using MemoryBarrier2KHR = MemoryBarrier2; // wrapper struct for struct VkImageSubresourceRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresourceRange.html struct ImageSubresourceRange { using NativeType = VkImageSubresourceRange; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT : aspectMask{ aspectMask_ } , baseMipLevel{ baseMipLevel_ } , levelCount{ levelCount_ } , baseArrayLayer{ baseArrayLayer_ } , layerCount{ layerCount_ } { } VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresourceRange( *reinterpret_cast( &rhs ) ) { } ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) & VULKAN_HPP_NOEXCEPT { baseMipLevel = baseMipLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange && setBaseMipLevel( uint32_t baseMipLevel_ ) && VULKAN_HPP_NOEXCEPT { baseMipLevel = baseMipLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) & VULKAN_HPP_NOEXCEPT { levelCount = levelCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange && setLevelCount( uint32_t levelCount_ ) && VULKAN_HPP_NOEXCEPT { levelCount = levelCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) & VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange && setBaseArrayLayer( uint32_t baseArrayLayer_ ) && VULKAN_HPP_NOEXCEPT { baseArrayLayer = baseArrayLayer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) & VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange && setLayerCount( uint32_t layerCount_ ) && VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresourceRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresourceRange 
const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSubresourceRange *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( aspectMask, baseMipLevel, levelCount, baseArrayLayer, layerCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageSubresourceRange const & ) const = default; #else bool operator==( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( aspectMask == rhs.aspectMask ) && ( baseMipLevel == rhs.baseMipLevel ) && ( levelCount == rhs.levelCount ) && ( baseArrayLayer == rhs.baseArrayLayer ) && ( layerCount == rhs.layerCount ); # endif } bool operator!=( ImageSubresourceRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageAspectFlags aspectMask = {}; uint32_t baseMipLevel = {}; uint32_t levelCount = {}; uint32_t baseArrayLayer = {}; uint32_t layerCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSubresourceRange; }; #endif // wrapper struct for struct VkImageMemoryBarrier2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier2.html struct ImageMemoryBarrier2 { using NativeType = VkImageMemoryBarrier2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( PipelineStageFlags2 srcStageMask_ = {}, AccessFlags2 srcAccessMask_ = {}, PipelineStageFlags2 dstStageMask_ = {}, AccessFlags2 dstAccessMask_ = {}, ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = 
{}, uint32_t dstQueueFamilyIndex_ = {}, Image image_ = {}, ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcStageMask{ srcStageMask_ } , srcAccessMask{ srcAccessMask_ } , dstStageMask{ dstStageMask_ } , dstAccessMask{ dstAccessMask_ } , oldLayout{ oldLayout_ } , newLayout{ newLayout_ } , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } , image{ image_ } , subresourceRange{ subresourceRange_ } { } VULKAN_HPP_CONSTEXPR ImageMemoryBarrier2( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier2( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier2( *reinterpret_cast( &rhs ) ) { } ImageMemoryBarrier2 & operator=( ImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageMemoryBarrier2 & operator=( VkImageMemoryBarrier2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) & VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setSrcStageMask( PipelineStageFlags2 srcStageMask_ ) && VULKAN_HPP_NOEXCEPT { srcStageMask = srcStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcAccessMask( AccessFlags2 srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setSrcAccessMask( AccessFlags2 
srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstStageMask( PipelineStageFlags2 dstStageMask_ ) & VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setDstStageMask( PipelineStageFlags2 dstStageMask_ ) && VULKAN_HPP_NOEXCEPT { dstStageMask = dstStageMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstAccessMask( AccessFlags2 dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setDstAccessMask( AccessFlags2 dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setOldLayout( ImageLayout oldLayout_ ) & VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setOldLayout( ImageLayout oldLayout_ ) && VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setNewLayout( ImageLayout newLayout_ ) & VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setNewLayout( ImageLayout newLayout_ ) && VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; 
} VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier2 && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageMemoryBarrier2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryBarrier2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryBarrier2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageMemoryBarrier2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcStageMask, srcAccessMask, dstStageMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageMemoryBarrier2 const & ) const = default; #else bool operator==( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcStageMask == rhs.srcStageMask ) && ( srcAccessMask == 
rhs.srcAccessMask ) && ( dstStageMask == rhs.dstStageMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); # endif } bool operator!=( ImageMemoryBarrier2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageMemoryBarrier2; const void * pNext = {}; PipelineStageFlags2 srcStageMask = {}; AccessFlags2 srcAccessMask = {}; PipelineStageFlags2 dstStageMask = {}; AccessFlags2 dstAccessMask = {}; ImageLayout oldLayout = ImageLayout::eUndefined; ImageLayout newLayout = ImageLayout::eUndefined; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; Image image = {}; ImageSubresourceRange subresourceRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageMemoryBarrier2; }; #endif template <> struct CppType { using Type = ImageMemoryBarrier2; }; using ImageMemoryBarrier2KHR = ImageMemoryBarrier2; // wrapper struct for struct VkDependencyInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDependencyInfo.html struct DependencyInfo { using NativeType = VkDependencyInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDependencyInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyFlags dependencyFlags_ = {}, uint32_t memoryBarrierCount_ = {}, const MemoryBarrier2 * pMemoryBarriers_ = {}, uint32_t bufferMemoryBarrierCount_ = {}, const BufferMemoryBarrier2 * pBufferMemoryBarriers_ = {}, uint32_t imageMemoryBarrierCount_ = {}, const ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dependencyFlags{ dependencyFlags_ } , memoryBarrierCount{ memoryBarrierCount_ } , pMemoryBarriers{ pMemoryBarriers_ } , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ } , pBufferMemoryBarriers{ pBufferMemoryBarriers_ } , imageMemoryBarrierCount{ imageMemoryBarrierCount_ } , pImageMemoryBarriers{ pImageMemoryBarriers_ } { } VULKAN_HPP_CONSTEXPR DependencyInfo( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DependencyInfo( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DependencyInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DependencyInfo( DependencyFlags dependencyFlags_, ArrayProxyNoTemporaries const & memoryBarriers_, ArrayProxyNoTemporaries const & bufferMemoryBarriers_ = {}, ArrayProxyNoTemporaries const & imageMemoryBarriers_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , dependencyFlags( dependencyFlags_ ) , memoryBarrierCount( static_cast( memoryBarriers_.size() ) ) , pMemoryBarriers( memoryBarriers_.data() ) , bufferMemoryBarrierCount( static_cast( bufferMemoryBarriers_.size() ) ) , pBufferMemoryBarriers( bufferMemoryBarriers_.data() ) , imageMemoryBarrierCount( static_cast( imageMemoryBarriers_.size() ) ) , pImageMemoryBarriers( imageMemoryBarriers_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DependencyInfo & operator=( DependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DependencyInfo & operator=( VkDependencyInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DependencyInfo & setDependencyFlags( DependencyFlags dependencyFlags_ ) & VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setDependencyFlags( DependencyFlags dependencyFlags_ ) && VULKAN_HPP_NOEXCEPT { dependencyFlags = dependencyFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) & VULKAN_HPP_NOEXCEPT { memoryBarrierCount = memoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setMemoryBarrierCount( uint32_t memoryBarrierCount_ ) && VULKAN_HPP_NOEXCEPT { memoryBarrierCount = memoryBarrierCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPMemoryBarriers( const MemoryBarrier2 * pMemoryBarriers_ ) & VULKAN_HPP_NOEXCEPT { pMemoryBarriers = pMemoryBarriers_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setPMemoryBarriers( const MemoryBarrier2 * pMemoryBarriers_ ) && VULKAN_HPP_NOEXCEPT { pMemoryBarriers = pMemoryBarriers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DependencyInfo & setMemoryBarriers( ArrayProxyNoTemporaries const & memoryBarriers_ ) VULKAN_HPP_NOEXCEPT { memoryBarrierCount = static_cast( memoryBarriers_.size() ); pMemoryBarriers = memoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) & VULKAN_HPP_NOEXCEPT { bufferMemoryBarrierCount = bufferMemoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setBufferMemoryBarrierCount( uint32_t bufferMemoryBarrierCount_ ) && VULKAN_HPP_NOEXCEPT { bufferMemoryBarrierCount = bufferMemoryBarrierCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPBufferMemoryBarriers( const BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) & VULKAN_HPP_NOEXCEPT { pBufferMemoryBarriers = 
pBufferMemoryBarriers_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setPBufferMemoryBarriers( const BufferMemoryBarrier2 * pBufferMemoryBarriers_ ) && VULKAN_HPP_NOEXCEPT { pBufferMemoryBarriers = pBufferMemoryBarriers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DependencyInfo & setBufferMemoryBarriers( ArrayProxyNoTemporaries const & bufferMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { bufferMemoryBarrierCount = static_cast( bufferMemoryBarriers_.size() ); pBufferMemoryBarriers = bufferMemoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) & VULKAN_HPP_NOEXCEPT { imageMemoryBarrierCount = imageMemoryBarrierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setImageMemoryBarrierCount( uint32_t imageMemoryBarrierCount_ ) && VULKAN_HPP_NOEXCEPT { imageMemoryBarrierCount = imageMemoryBarrierCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DependencyInfo & setPImageMemoryBarriers( const ImageMemoryBarrier2 * pImageMemoryBarriers_ ) & VULKAN_HPP_NOEXCEPT { pImageMemoryBarriers = pImageMemoryBarriers_; return *this; } VULKAN_HPP_CONSTEXPR_14 DependencyInfo && setPImageMemoryBarriers( const ImageMemoryBarrier2 * pImageMemoryBarriers_ ) && VULKAN_HPP_NOEXCEPT { pImageMemoryBarriers = pImageMemoryBarriers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DependencyInfo & setImageMemoryBarriers( ArrayProxyNoTemporaries const & imageMemoryBarriers_ ) VULKAN_HPP_NOEXCEPT { imageMemoryBarrierCount = static_cast( imageMemoryBarriers_.size() ); pImageMemoryBarriers = imageMemoryBarriers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDependencyInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDependencyInfo &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkDependencyInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDependencyInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DependencyInfo const & ) const = default; #else bool operator==( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dependencyFlags == rhs.dependencyFlags ) && ( memoryBarrierCount == rhs.memoryBarrierCount ) && ( pMemoryBarriers == rhs.pMemoryBarriers ) && ( bufferMemoryBarrierCount == rhs.bufferMemoryBarrierCount ) && ( pBufferMemoryBarriers == rhs.pBufferMemoryBarriers ) && ( imageMemoryBarrierCount == rhs.imageMemoryBarrierCount ) && ( pImageMemoryBarriers == rhs.pImageMemoryBarriers ); # endif } bool operator!=( DependencyInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDependencyInfo; const void * pNext = {}; DependencyFlags dependencyFlags = {}; uint32_t memoryBarrierCount = {}; const MemoryBarrier2 * pMemoryBarriers = {}; uint32_t bufferMemoryBarrierCount = {}; const BufferMemoryBarrier2 * pBufferMemoryBarriers = {}; uint32_t imageMemoryBarrierCount = {}; const ImageMemoryBarrier2 * pImageMemoryBarriers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DependencyInfo; }; #endif template <> struct CppType { using Type = DependencyInfo; }; using DependencyInfoKHR = DependencyInfo; // wrapper struct for struct VkDepthBiasInfoEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasInfoEXT.html struct DepthBiasInfoEXT { using NativeType = VkDepthBiasInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthBiasConstantFactor{ depthBiasConstantFactor_ } , depthBiasClamp{ depthBiasClamp_ } , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } { } VULKAN_HPP_CONSTEXPR DepthBiasInfoEXT( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DepthBiasInfoEXT( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasInfoEXT( *reinterpret_cast( &rhs ) ) {} DepthBiasInfoEXT & operator=( DepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DepthBiasInfoEXT & operator=( VkDepthBiasInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) & VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT && setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) && VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasClamp( float depthBiasClamp_ ) & VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT && setDepthBiasClamp( float depthBiasClamp_ ) && VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) & VULKAN_HPP_NOEXCEPT { depthBiasSlopeFactor = depthBiasSlopeFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasInfoEXT && setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) && VULKAN_HPP_NOEXCEPT { depthBiasSlopeFactor = depthBiasSlopeFactor_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDepthBiasInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthBiasInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthBiasInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDepthBiasInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DepthBiasInfoEXT const & ) const = default; #else bool operator==( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ); # endif } bool operator!=( DepthBiasInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eDepthBiasInfoEXT; const void * pNext = {}; float depthBiasConstantFactor = {}; float depthBiasClamp = {}; float depthBiasSlopeFactor = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DepthBiasInfoEXT; }; #endif template <> struct CppType { using Type = DepthBiasInfoEXT; }; // wrapper struct for struct VkDepthBiasRepresentationInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthBiasRepresentationInfoEXT.html struct DepthBiasRepresentationInfoEXT { using NativeType = VkDepthBiasRepresentationInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDepthBiasRepresentationInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationEXT depthBiasRepresentation_ = DepthBiasRepresentationEXT::eLeastRepresentableValueFormat, Bool32 depthBiasExact_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthBiasRepresentation{ depthBiasRepresentation_ } , depthBiasExact{ depthBiasExact_ } { } VULKAN_HPP_CONSTEXPR DepthBiasRepresentationInfoEXT( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DepthBiasRepresentationInfoEXT( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthBiasRepresentationInfoEXT( *reinterpret_cast( &rhs ) ) { } DepthBiasRepresentationInfoEXT & operator=( DepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DepthBiasRepresentationInfoEXT & operator=( VkDepthBiasRepresentationInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setPNext( const void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasRepresentation( DepthBiasRepresentationEXT depthBiasRepresentation_ ) & VULKAN_HPP_NOEXCEPT { depthBiasRepresentation = depthBiasRepresentation_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT && setDepthBiasRepresentation( DepthBiasRepresentationEXT depthBiasRepresentation_ ) && VULKAN_HPP_NOEXCEPT { depthBiasRepresentation = depthBiasRepresentation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT & setDepthBiasExact( Bool32 depthBiasExact_ ) & VULKAN_HPP_NOEXCEPT { depthBiasExact = depthBiasExact_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthBiasRepresentationInfoEXT && setDepthBiasExact( Bool32 depthBiasExact_ ) && VULKAN_HPP_NOEXCEPT { depthBiasExact = depthBiasExact_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDepthBiasRepresentationInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthBiasRepresentationInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthBiasRepresentationInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDepthBiasRepresentationInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthBiasRepresentation, depthBiasExact ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DepthBiasRepresentationInfoEXT const & ) const = default; #else bool operator==( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # 
else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasRepresentation == rhs.depthBiasRepresentation ) && ( depthBiasExact == rhs.depthBiasExact ); # endif } bool operator!=( DepthBiasRepresentationInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDepthBiasRepresentationInfoEXT; const void * pNext = {}; DepthBiasRepresentationEXT depthBiasRepresentation = DepthBiasRepresentationEXT::eLeastRepresentableValueFormat; Bool32 depthBiasExact = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DepthBiasRepresentationInfoEXT; }; #endif template <> struct CppType { using Type = DepthBiasRepresentationInfoEXT; }; // wrapper struct for struct VkDepthClampRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDepthClampRangeEXT.html struct DepthClampRangeEXT { using NativeType = VkDepthClampRangeEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( float minDepthClamp_ = {}, float maxDepthClamp_ = {} ) VULKAN_HPP_NOEXCEPT : minDepthClamp{ minDepthClamp_ } , maxDepthClamp{ maxDepthClamp_ } { } VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthClampRangeEXT( *reinterpret_cast( &rhs ) ) {} DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) & VULKAN_HPP_NOEXCEPT { minDepthClamp = minDepthClamp_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT && setMinDepthClamp( float minDepthClamp_ ) && VULKAN_HPP_NOEXCEPT { minDepthClamp = minDepthClamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) & VULKAN_HPP_NOEXCEPT { maxDepthClamp = maxDepthClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT && setMaxDepthClamp( float maxDepthClamp_ ) && VULKAN_HPP_NOEXCEPT { maxDepthClamp = maxDepthClamp_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDepthClampRangeEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDepthClampRangeEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( minDepthClamp, maxDepthClamp ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DepthClampRangeEXT const & ) const = default; #else bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( minDepthClamp == rhs.minDepthClamp ) && ( maxDepthClamp == rhs.maxDepthClamp ); # endif } bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float minDepthClamp = {}; float maxDepthClamp = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DepthClampRangeEXT; }; #endif // wrapper struct for struct VkDescriptorAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorAddressInfoEXT.html struct DescriptorAddressInfoEXT { using NativeType = VkDescriptorAddressInfoEXT; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorAddressInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DeviceAddress address_ = {}, DeviceSize range_ = {}, Format format_ = Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , address{ address_ } , range{ range_ } , format{ format_ } { } VULKAN_HPP_CONSTEXPR DescriptorAddressInfoEXT( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorAddressInfoEXT( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorAddressInfoEXT( *reinterpret_cast( &rhs ) ) { } DescriptorAddressInfoEXT & operator=( DescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorAddressInfoEXT & operator=( VkDescriptorAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setAddress( DeviceAddress address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT && setAddress( DeviceAddress address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setRange( DeviceSize range_ ) & VULKAN_HPP_NOEXCEPT { range = range_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT && setRange( DeviceSize range_ ) && VULKAN_HPP_NOEXCEPT { range = range_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorAddressInfoEXT && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorAddressInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorAddressInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, address, range, format ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorAddressInfoEXT const & ) const = default; #else bool operator==( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( range == rhs.range ) && ( format == rhs.format ); # endif } bool operator!=( DescriptorAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorAddressInfoEXT; void * pNext = {}; DeviceAddress address = {}; DeviceSize range = {}; Format format = Format::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorAddressInfoEXT; }; #endif template <> struct CppType { using Type = DescriptorAddressInfoEXT; }; // wrapper struct for struct VkDescriptorBufferBindingInfoEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingInfoEXT.html struct DescriptorBufferBindingInfoEXT { using NativeType = VkDescriptorBufferBindingInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DeviceAddress address_ = {}, BufferUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , address{ address_ } , usage{ usage_ } { } VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorBufferBindingInfoEXT( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorBufferBindingInfoEXT( *reinterpret_cast( &rhs ) ) { } DescriptorBufferBindingInfoEXT & operator=( DescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorBufferBindingInfoEXT & operator=( VkDescriptorBufferBindingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setAddress( DeviceAddress address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT && setAddress( DeviceAddress address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setUsage( BufferUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT && setUsage( BufferUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorBufferBindingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferBindingInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferBindingInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorBufferBindingInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, address, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorBufferBindingInfoEXT const & ) const = default; #else bool operator==( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( address == rhs.address ) && ( usage == rhs.usage ); # endif } bool operator!=( DescriptorBufferBindingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT; const void * pNext = {}; DeviceAddress address = {}; BufferUsageFlags usage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorBufferBindingInfoEXT; }; #endif template <> struct CppType { using Type = DescriptorBufferBindingInfoEXT; }; // wrapper struct for struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferBindingPushDescriptorBufferHandleEXT.html struct DescriptorBufferBindingPushDescriptorBufferHandleEXT { using NativeType = VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorBufferBindingPushDescriptorBufferHandleEXT( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorBufferBindingPushDescriptorBufferHandleEXT( *reinterpret_cast( &rhs ) ) { } DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorBufferBindingPushDescriptorBufferHandleEXT & operator=( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DescriptorBufferBindingPushDescriptorBufferHandleEXT & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorBufferBindingPushDescriptorBufferHandleEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & ) const = default; #else bool operator==( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( DescriptorBufferBindingPushDescriptorBufferHandleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; const void * pNext = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT; }; #endif template <> struct CppType { using Type = DescriptorBufferBindingPushDescriptorBufferHandleEXT; }; 
// wrapper struct for struct VkDescriptorBufferInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorBufferInfo.html struct DescriptorBufferInfo { using NativeType = VkDescriptorBufferInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT : buffer{ buffer_ } , offset{ offset_ } , range{ range_ } { } VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorBufferInfo( *reinterpret_cast( &rhs ) ) { } DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & setRange( DeviceSize range_ ) & VULKAN_HPP_NOEXCEPT { range = range_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo && setRange( DeviceSize range_ ) && VULKAN_HPP_NOEXCEPT { range = range_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ 
operator VkDescriptorBufferInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorBufferInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorBufferInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( buffer, offset, range ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorBufferInfo const & ) const = default; #else bool operator==( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( range == rhs.range ); # endif } bool operator!=( DescriptorBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Buffer buffer = {}; DeviceSize offset = {}; DeviceSize range = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorBufferInfo; }; #endif // wrapper struct for struct VkDescriptorImageInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorImageInfo.html struct DescriptorImageInfo { using NativeType = VkDescriptorImageInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorImageInfo( Sampler sampler_ = {}, ImageView imageView_ = {}, ImageLayout imageLayout_ = ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT : sampler{ sampler_ } , imageView{ imageView_ } , imageLayout{ imageLayout_ } { } VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorImageInfo( *reinterpret_cast( 
&rhs ) ) { } DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setSampler( Sampler sampler_ ) & VULKAN_HPP_NOEXCEPT { sampler = sampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo && setSampler( Sampler sampler_ ) && VULKAN_HPP_NOEXCEPT { sampler = sampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageView( ImageView imageView_ ) & VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo && setImageView( ImageView imageView_ ) && VULKAN_HPP_NOEXCEPT { imageView = imageView_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & setImageLayout( ImageLayout imageLayout_ ) & VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo && setImageLayout( ImageLayout imageLayout_ ) && VULKAN_HPP_NOEXCEPT { imageLayout = imageLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorImageInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorImageInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sampler, imageView, imageLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorImageInfo const & ) const = default; #else bool 
operator==( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sampler == rhs.sampler ) && ( imageView == rhs.imageView ) && ( imageLayout == rhs.imageLayout ); # endif } bool operator!=( DescriptorImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Sampler sampler = {}; ImageView imageView = {}; ImageLayout imageLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorImageInfo; }; #endif union DescriptorDataEXT { using NativeType = VkDescriptorDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const Sampler * pSampler_ = {} ) : pSampler( pSampler_ ) {} VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const DescriptorImageInfo * pDescriptorImageInfo_ ) : pCombinedImageSampler( pDescriptorImageInfo_ ) {} VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( const DescriptorAddressInfoEXT * pDescriptorAddressInfoEXT_ ) : pUniformTexelBuffer( pDescriptorAddressInfoEXT_ ) { } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT( DeviceAddress accelerationStructure_ ) : accelerationStructure( accelerationStructure_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampler( const Sampler * pSampler_ ) & VULKAN_HPP_NOEXCEPT { pSampler = pSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPSampler( const Sampler * pSampler_ ) && VULKAN_HPP_NOEXCEPT { pSampler = pSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPCombinedImageSampler( const DescriptorImageInfo * pCombinedImageSampler_ ) & VULKAN_HPP_NOEXCEPT { pCombinedImageSampler = pCombinedImageSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && 
setPCombinedImageSampler( const DescriptorImageInfo * pCombinedImageSampler_ ) && VULKAN_HPP_NOEXCEPT { pCombinedImageSampler = pCombinedImageSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPInputAttachmentImage( const DescriptorImageInfo * pInputAttachmentImage_ ) & VULKAN_HPP_NOEXCEPT { pInputAttachmentImage = pInputAttachmentImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPInputAttachmentImage( const DescriptorImageInfo * pInputAttachmentImage_ ) && VULKAN_HPP_NOEXCEPT { pInputAttachmentImage = pInputAttachmentImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPSampledImage( const DescriptorImageInfo * pSampledImage_ ) & VULKAN_HPP_NOEXCEPT { pSampledImage = pSampledImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPSampledImage( const DescriptorImageInfo * pSampledImage_ ) && VULKAN_HPP_NOEXCEPT { pSampledImage = pSampledImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageImage( const DescriptorImageInfo * pStorageImage_ ) & VULKAN_HPP_NOEXCEPT { pStorageImage = pStorageImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPStorageImage( const DescriptorImageInfo * pStorageImage_ ) && VULKAN_HPP_NOEXCEPT { pStorageImage = pStorageImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformTexelBuffer( const DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) & VULKAN_HPP_NOEXCEPT { pUniformTexelBuffer = pUniformTexelBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPUniformTexelBuffer( const DescriptorAddressInfoEXT * pUniformTexelBuffer_ ) && VULKAN_HPP_NOEXCEPT { pUniformTexelBuffer = pUniformTexelBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageTexelBuffer( const DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) & VULKAN_HPP_NOEXCEPT { pStorageTexelBuffer = pStorageTexelBuffer_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPStorageTexelBuffer( const DescriptorAddressInfoEXT * pStorageTexelBuffer_ ) && VULKAN_HPP_NOEXCEPT { pStorageTexelBuffer = pStorageTexelBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPUniformBuffer( const DescriptorAddressInfoEXT * pUniformBuffer_ ) & VULKAN_HPP_NOEXCEPT { pUniformBuffer = pUniformBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPUniformBuffer( const DescriptorAddressInfoEXT * pUniformBuffer_ ) && VULKAN_HPP_NOEXCEPT { pUniformBuffer = pUniformBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setPStorageBuffer( const DescriptorAddressInfoEXT * pStorageBuffer_ ) & VULKAN_HPP_NOEXCEPT { pStorageBuffer = pStorageBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setPStorageBuffer( const DescriptorAddressInfoEXT * pStorageBuffer_ ) && VULKAN_HPP_NOEXCEPT { pStorageBuffer = pStorageBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT & setAccelerationStructure( DeviceAddress accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorDataEXT && setAccelerationStructure( DeviceAddress accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorDataEXT const &() const { return *reinterpret_cast( this ); } operator VkDescriptorDataEXT &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS const Sampler * pSampler; const DescriptorImageInfo * pCombinedImageSampler; const DescriptorImageInfo * pInputAttachmentImage; const DescriptorImageInfo * pSampledImage; const DescriptorImageInfo * pStorageImage; const DescriptorAddressInfoEXT * pUniformTexelBuffer; const DescriptorAddressInfoEXT * pStorageTexelBuffer; const 
DescriptorAddressInfoEXT * pUniformBuffer; const DescriptorAddressInfoEXT * pStorageBuffer; DeviceAddress accelerationStructure; #else const VkSampler * pSampler; const VkDescriptorImageInfo * pCombinedImageSampler; const VkDescriptorImageInfo * pInputAttachmentImage; const VkDescriptorImageInfo * pSampledImage; const VkDescriptorImageInfo * pStorageImage; const VkDescriptorAddressInfoEXT * pUniformTexelBuffer; const VkDescriptorAddressInfoEXT * pStorageTexelBuffer; const VkDescriptorAddressInfoEXT * pUniformBuffer; const VkDescriptorAddressInfoEXT * pStorageBuffer; VkDeviceAddress accelerationStructure; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorDataEXT; }; #endif // wrapper struct for struct VkDescriptorGetInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetInfoEXT.html struct DescriptorGetInfoEXT { using NativeType = VkDescriptorGetInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorType type_ = DescriptorType::eSampler, DescriptorDataEXT data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , data{ data_ } { } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorGetInfoEXT( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorGetInfoEXT( *reinterpret_cast( &rhs ) ) { } DescriptorGetInfoEXT & operator=( DescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorGetInfoEXT & operator=( VkDescriptorGetInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setType( DescriptorType type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT && setType( DescriptorType type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT & setData( DescriptorDataEXT const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT && setData( DescriptorDataEXT const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorGetInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorGetInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorGetInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorGetInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, data ); } #endif public: StructureType sType = StructureType::eDescriptorGetInfoEXT; const void * pNext = {}; DescriptorType type = DescriptorType::eSampler; DescriptorDataEXT data = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorGetInfoEXT; }; #endif template <> struct CppType { using Type = DescriptorGetInfoEXT; }; // wrapper struct for struct VkDescriptorGetTensorInfoARM, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorGetTensorInfoARM.html struct DescriptorGetTensorInfoARM { using NativeType = VkDescriptorGetTensorInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorGetTensorInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM( TensorViewARM tensorView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tensorView{ tensorView_ } { } VULKAN_HPP_CONSTEXPR DescriptorGetTensorInfoARM( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorGetTensorInfoARM( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorGetTensorInfoARM( *reinterpret_cast( &rhs ) ) { } DescriptorGetTensorInfoARM & operator=( DescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorGetTensorInfoARM & operator=( VkDescriptorGetTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM & setTensorView( TensorViewARM tensorView_ ) & VULKAN_HPP_NOEXCEPT { tensorView = tensorView_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorGetTensorInfoARM && setTensorView( TensorViewARM tensorView_ ) && VULKAN_HPP_NOEXCEPT { tensorView = tensorView_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorGetTensorInfoARM const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorGetTensorInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorGetTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorGetTensorInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tensorView ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorGetTensorInfoARM const & ) const = default; #else bool operator==( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorView == rhs.tensorView ); # endif } bool operator!=( DescriptorGetTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorGetTensorInfoARM; const void * pNext = {}; TensorViewARM tensorView = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorGetTensorInfoARM; }; #endif template <> struct CppType { using Type = DescriptorGetTensorInfoARM; }; // wrapper struct for struct VkSamplerCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSamplerCreateInfo.html struct SamplerCreateInfo { using NativeType = VkSamplerCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateFlags flags_ = {}, Filter magFilter_ = Filter::eNearest, Filter minFilter_ = Filter::eNearest, SamplerMipmapMode mipmapMode_ = 
SamplerMipmapMode::eNearest, SamplerAddressMode addressModeU_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeV_ = SamplerAddressMode::eRepeat, SamplerAddressMode addressModeW_ = SamplerAddressMode::eRepeat, float mipLodBias_ = {}, Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, Bool32 compareEnable_ = {}, CompareOp compareOp_ = CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, BorderColor borderColor_ = BorderColor::eFloatTransparentBlack, Bool32 unnormalizedCoordinates_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , magFilter{ magFilter_ } , minFilter{ minFilter_ } , mipmapMode{ mipmapMode_ } , addressModeU{ addressModeU_ } , addressModeV{ addressModeV_ } , addressModeW{ addressModeW_ } , mipLodBias{ mipLodBias_ } , anisotropyEnable{ anisotropyEnable_ } , maxAnisotropy{ maxAnisotropy_ } , compareEnable{ compareEnable_ } , compareOp{ compareOp_ } , minLod{ minLod_ } , maxLod{ maxLod_ } , borderColor{ borderColor_ } , unnormalizedCoordinates{ unnormalizedCoordinates_ } { } VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : SamplerCreateInfo( *reinterpret_cast( &rhs ) ) {} SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setFlags( 
SamplerCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setFlags( SamplerCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMagFilter( Filter magFilter_ ) & VULKAN_HPP_NOEXCEPT { magFilter = magFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMagFilter( Filter magFilter_ ) && VULKAN_HPP_NOEXCEPT { magFilter = magFilter_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinFilter( Filter minFilter_ ) & VULKAN_HPP_NOEXCEPT { minFilter = minFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMinFilter( Filter minFilter_ ) && VULKAN_HPP_NOEXCEPT { minFilter = minFilter_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipmapMode( SamplerMipmapMode mipmapMode_ ) & VULKAN_HPP_NOEXCEPT { mipmapMode = mipmapMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMipmapMode( SamplerMipmapMode mipmapMode_ ) && VULKAN_HPP_NOEXCEPT { mipmapMode = mipmapMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeU( SamplerAddressMode addressModeU_ ) & VULKAN_HPP_NOEXCEPT { addressModeU = addressModeU_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeU( SamplerAddressMode addressModeU_ ) && VULKAN_HPP_NOEXCEPT { addressModeU = addressModeU_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeV( SamplerAddressMode addressModeV_ ) & VULKAN_HPP_NOEXCEPT { addressModeV = addressModeV_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeV( SamplerAddressMode addressModeV_ ) && VULKAN_HPP_NOEXCEPT { addressModeV = addressModeV_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAddressModeW( SamplerAddressMode addressModeW_ ) & VULKAN_HPP_NOEXCEPT { addressModeW = 
addressModeW_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAddressModeW( SamplerAddressMode addressModeW_ ) && VULKAN_HPP_NOEXCEPT { addressModeW = addressModeW_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) & VULKAN_HPP_NOEXCEPT { mipLodBias = mipLodBias_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMipLodBias( float mipLodBias_ ) && VULKAN_HPP_NOEXCEPT { mipLodBias = mipLodBias_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setAnisotropyEnable( Bool32 anisotropyEnable_ ) & VULKAN_HPP_NOEXCEPT { anisotropyEnable = anisotropyEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setAnisotropyEnable( Bool32 anisotropyEnable_ ) && VULKAN_HPP_NOEXCEPT { anisotropyEnable = anisotropyEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) & VULKAN_HPP_NOEXCEPT { maxAnisotropy = maxAnisotropy_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxAnisotropy( float maxAnisotropy_ ) && VULKAN_HPP_NOEXCEPT { maxAnisotropy = maxAnisotropy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareEnable( Bool32 compareEnable_ ) & VULKAN_HPP_NOEXCEPT { compareEnable = compareEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareEnable( Bool32 compareEnable_ ) && VULKAN_HPP_NOEXCEPT { compareEnable = compareEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setCompareOp( CompareOp compareOp_ ) & VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setCompareOp( CompareOp compareOp_ ) && VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMinLod( float minLod_ ) & VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 
SamplerCreateInfo && setMinLod( float minLod_ ) && VULKAN_HPP_NOEXCEPT { minLod = minLod_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setMaxLod( float maxLod_ ) & VULKAN_HPP_NOEXCEPT { maxLod = maxLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setMaxLod( float maxLod_ ) && VULKAN_HPP_NOEXCEPT { maxLod = maxLod_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setBorderColor( BorderColor borderColor_ ) & VULKAN_HPP_NOEXCEPT { borderColor = borderColor_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setBorderColor( BorderColor borderColor_ ) && VULKAN_HPP_NOEXCEPT { borderColor = borderColor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) & VULKAN_HPP_NOEXCEPT { unnormalizedCoordinates = unnormalizedCoordinates_; return *this; } VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo && setUnnormalizedCoordinates( Bool32 unnormalizedCoordinates_ ) && VULKAN_HPP_NOEXCEPT { unnormalizedCoordinates = unnormalizedCoordinates_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSamplerCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSamplerCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSamplerCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, magFilter, minFilter, mipmapMode, addressModeU, addressModeV, addressModeW, mipLodBias, anisotropyEnable, maxAnisotropy, compareEnable, compareOp, minLod, maxLod, borderColor, unnormalizedCoordinates ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SamplerCreateInfo const & ) const = default; 
#else bool operator==( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( magFilter == rhs.magFilter ) && ( minFilter == rhs.minFilter ) && ( mipmapMode == rhs.mipmapMode ) && ( addressModeU == rhs.addressModeU ) && ( addressModeV == rhs.addressModeV ) && ( addressModeW == rhs.addressModeW ) && ( mipLodBias == rhs.mipLodBias ) && ( anisotropyEnable == rhs.anisotropyEnable ) && ( maxAnisotropy == rhs.maxAnisotropy ) && ( compareEnable == rhs.compareEnable ) && ( compareOp == rhs.compareOp ) && ( minLod == rhs.minLod ) && ( maxLod == rhs.maxLod ) && ( borderColor == rhs.borderColor ) && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates ); # endif } bool operator!=( SamplerCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eSamplerCreateInfo; const void * pNext = {}; SamplerCreateFlags flags = {}; Filter magFilter = Filter::eNearest; Filter minFilter = Filter::eNearest; SamplerMipmapMode mipmapMode = SamplerMipmapMode::eNearest; SamplerAddressMode addressModeU = SamplerAddressMode::eRepeat; SamplerAddressMode addressModeV = SamplerAddressMode::eRepeat; SamplerAddressMode addressModeW = SamplerAddressMode::eRepeat; float mipLodBias = {}; Bool32 anisotropyEnable = {}; float maxAnisotropy = {}; Bool32 compareEnable = {}; CompareOp compareOp = CompareOp::eNever; float minLod = {}; float maxLod = {}; BorderColor borderColor = BorderColor::eFloatTransparentBlack; Bool32 unnormalizedCoordinates = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SamplerCreateInfo; }; #endif template <> struct CppType { using Type = SamplerCreateInfo; }; // wrapper struct for struct VkDescriptorMappingSourceConstantOffsetEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceConstantOffsetEXT.html struct DescriptorMappingSourceConstantOffsetEXT { using NativeType = VkDescriptorMappingSourceConstantOffsetEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceConstantOffsetEXT( uint32_t heapOffset_ = {}, uint32_t heapArrayStride_ = {}, const SamplerCreateInfo * pEmbeddedSampler_ = {}, uint32_t samplerHeapOffset_ = {}, uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , heapArrayStride{ heapArrayStride_ } , pEmbeddedSampler{ pEmbeddedSampler_ } , samplerHeapOffset{ samplerHeapOffset_ } , samplerHeapArrayStride{ samplerHeapArrayStride_ } { } VULKAN_HPP_CONSTEXPR DescriptorMappingSourceConstantOffsetEXT( DescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourceConstantOffsetEXT( VkDescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourceConstantOffsetEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourceConstantOffsetEXT & operator=( DescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourceConstantOffsetEXT & operator=( VkDescriptorMappingSourceConstantOffsetEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & 
setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceConstantOffsetEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceConstantOffsetEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceConstantOffsetEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceConstantOffsetEXT const *() const VULKAN_HPP_NOEXCEPT { 
return reinterpret_cast( this ); } operator VkDescriptorMappingSourceConstantOffsetEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, heapArrayStride, pEmbeddedSampler, samplerHeapOffset, samplerHeapArrayStride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceConstantOffsetEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceConstantOffsetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride ); # endif } bool operator!=( DescriptorMappingSourceConstantOffsetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t heapArrayStride = {}; const SamplerCreateInfo * pEmbeddedSampler = {}; uint32_t samplerHeapOffset = {}; uint32_t samplerHeapArrayStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceConstantOffsetEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourcePushIndexEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourcePushIndexEXT.html struct DescriptorMappingSourcePushIndexEXT { using NativeType = VkDescriptorMappingSourcePushIndexEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourcePushIndexEXT( uint32_t heapOffset_ = {}, uint32_t pushOffset_ = {}, uint32_t heapIndexStride_ = {}, uint32_t heapArrayStride_ = {}, const SamplerCreateInfo * pEmbeddedSampler_ = {}, Bool32 
useCombinedImageSamplerIndex_ = {}, uint32_t samplerHeapOffset_ = {}, uint32_t samplerPushOffset_ = {}, uint32_t samplerHeapIndexStride_ = {}, uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , pushOffset{ pushOffset_ } , heapIndexStride{ heapIndexStride_ } , heapArrayStride{ heapArrayStride_ } , pEmbeddedSampler{ pEmbeddedSampler_ } , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ } , samplerHeapOffset{ samplerHeapOffset_ } , samplerPushOffset{ samplerPushOffset_ } , samplerHeapIndexStride{ samplerHeapIndexStride_ } , samplerHeapArrayStride{ samplerHeapArrayStride_ } { } VULKAN_HPP_CONSTEXPR DescriptorMappingSourcePushIndexEXT( DescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourcePushIndexEXT( VkDescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourcePushIndexEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourcePushIndexEXT & operator=( DescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourcePushIndexEXT & operator=( VkDescriptorMappingSourcePushIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setPushOffset( uint32_t pushOffset_ ) && 
VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) && VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 
DescriptorMappingSourcePushIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourcePushIndexEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourcePushIndexEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourcePushIndexEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourcePushIndexEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorMappingSourcePushIndexEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, pushOffset, heapIndexStride, heapArrayStride, pEmbeddedSampler, useCombinedImageSamplerIndex, samplerHeapOffset, samplerPushOffset, samplerHeapIndexStride, samplerHeapArrayStride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourcePushIndexEXT const & ) const = default; #else bool operator==( DescriptorMappingSourcePushIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( heapIndexStride == rhs.heapIndexStride ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride ); # endif } bool operator!=( DescriptorMappingSourcePushIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t pushOffset = {}; uint32_t heapIndexStride = {}; uint32_t heapArrayStride = {}; const SamplerCreateInfo * pEmbeddedSampler = {}; Bool32 useCombinedImageSamplerIndex = {}; uint32_t samplerHeapOffset = {}; uint32_t samplerPushOffset = {}; uint32_t samplerHeapIndexStride = {}; uint32_t samplerHeapArrayStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourcePushIndexEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourceIndirectIndexEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectIndexEXT.html struct DescriptorMappingSourceIndirectIndexEXT { using NativeType = 
VkDescriptorMappingSourceIndirectIndexEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexEXT( uint32_t heapOffset_ = {}, uint32_t pushOffset_ = {}, uint32_t addressOffset_ = {}, uint32_t heapIndexStride_ = {}, uint32_t heapArrayStride_ = {}, const SamplerCreateInfo * pEmbeddedSampler_ = {}, Bool32 useCombinedImageSamplerIndex_ = {}, uint32_t samplerHeapOffset_ = {}, uint32_t samplerPushOffset_ = {}, uint32_t samplerAddressOffset_ = {}, uint32_t samplerHeapIndexStride_ = {}, uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , pushOffset{ pushOffset_ } , addressOffset{ addressOffset_ } , heapIndexStride{ heapIndexStride_ } , heapArrayStride{ heapArrayStride_ } , pEmbeddedSampler{ pEmbeddedSampler_ } , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ } , samplerHeapOffset{ samplerHeapOffset_ } , samplerPushOffset{ samplerPushOffset_ } , samplerAddressOffset{ samplerAddressOffset_ } , samplerHeapIndexStride{ samplerHeapIndexStride_ } , samplerHeapArrayStride{ samplerHeapArrayStride_ } { } VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexEXT( DescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourceIndirectIndexEXT( VkDescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourceIndirectIndexEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourceIndirectIndexEXT & operator=( DescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourceIndirectIndexEXT & operator=( VkDescriptorMappingSourceIndirectIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & 
setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { heapArrayStride = heapArrayStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = 
pEmbeddedSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) && VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerAddressOffset = samplerAddressOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerAddressOffset = samplerAddressOffset_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapArrayStride = samplerHeapArrayStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceIndirectIndexEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, pushOffset, addressOffset, heapIndexStride, heapArrayStride, pEmbeddedSampler, useCombinedImageSamplerIndex, samplerHeapOffset, samplerPushOffset, samplerAddressOffset, samplerHeapIndexStride, samplerHeapArrayStride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceIndirectIndexEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceIndirectIndexEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset ) && ( heapIndexStride == rhs.heapIndexStride ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerAddressOffset == rhs.samplerAddressOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride ); # endif } bool operator!=( DescriptorMappingSourceIndirectIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t pushOffset = {}; uint32_t addressOffset = {}; uint32_t heapIndexStride = {}; uint32_t heapArrayStride = {}; const SamplerCreateInfo * pEmbeddedSampler = {}; Bool32 useCombinedImageSamplerIndex = {}; uint32_t samplerHeapOffset = {}; uint32_t samplerPushOffset = {}; uint32_t samplerAddressOffset = {}; uint32_t samplerHeapIndexStride = {}; uint32_t samplerHeapArrayStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceIndirectIndexEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourceIndirectIndexArrayEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectIndexArrayEXT.html struct DescriptorMappingSourceIndirectIndexArrayEXT { using NativeType = VkDescriptorMappingSourceIndirectIndexArrayEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexArrayEXT( uint32_t heapOffset_ = {}, uint32_t pushOffset_ = {}, uint32_t addressOffset_ = {}, uint32_t heapIndexStride_ = {}, 
const SamplerCreateInfo * pEmbeddedSampler_ = {}, Bool32 useCombinedImageSamplerIndex_ = {}, uint32_t samplerHeapOffset_ = {}, uint32_t samplerPushOffset_ = {}, uint32_t samplerAddressOffset_ = {}, uint32_t samplerHeapIndexStride_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , pushOffset{ pushOffset_ } , addressOffset{ addressOffset_ } , heapIndexStride{ heapIndexStride_ } , pEmbeddedSampler{ pEmbeddedSampler_ } , useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ } , samplerHeapOffset{ samplerHeapOffset_ } , samplerPushOffset{ samplerPushOffset_ } , samplerAddressOffset{ samplerAddressOffset_ } , samplerHeapIndexStride{ samplerHeapIndexStride_ } { } VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectIndexArrayEXT( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourceIndirectIndexArrayEXT( VkDescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourceIndirectIndexArrayEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourceIndirectIndexArrayEXT & operator=( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourceIndirectIndexArrayEXT & operator=( VkDescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT { 
pushOffset = pushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { heapIndexStride = heapIndexStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT { pEmbeddedSampler = pEmbeddedSampler_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) && VULKAN_HPP_NOEXCEPT { useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapOffset = samplerHeapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerPushOffset( uint32_t samplerPushOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerPushOffset( uint32_t samplerPushOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerPushOffset = samplerPushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) & VULKAN_HPP_NOEXCEPT { samplerAddressOffset = samplerAddressOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerAddressOffset( uint32_t samplerAddressOffset_ ) && VULKAN_HPP_NOEXCEPT { samplerAddressOffset = samplerAddressOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectIndexArrayEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT { samplerHeapIndexStride = samplerHeapIndexStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceIndirectIndexArrayEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexArrayEXT &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexArrayEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectIndexArrayEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, pushOffset, addressOffset, heapIndexStride, pEmbeddedSampler, useCombinedImageSamplerIndex, samplerHeapOffset, samplerPushOffset, samplerAddressOffset, samplerHeapIndexStride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceIndirectIndexArrayEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset ) && ( heapIndexStride == rhs.heapIndexStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerPushOffset == rhs.samplerPushOffset ) && ( samplerAddressOffset == rhs.samplerAddressOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ); # endif } bool operator!=( DescriptorMappingSourceIndirectIndexArrayEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t pushOffset = {}; uint32_t addressOffset = {}; uint32_t heapIndexStride = {}; const SamplerCreateInfo * pEmbeddedSampler = {}; Bool32 useCombinedImageSamplerIndex = {}; uint32_t samplerHeapOffset = {}; uint32_t samplerPushOffset = {}; uint32_t samplerAddressOffset = {}; uint32_t samplerHeapIndexStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
DescriptorMappingSourceIndirectIndexArrayEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourceHeapDataEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceHeapDataEXT.html struct DescriptorMappingSourceHeapDataEXT { using NativeType = VkDescriptorMappingSourceHeapDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceHeapDataEXT( uint32_t heapOffset_ = {}, uint32_t pushOffset_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , pushOffset{ pushOffset_ } { } VULKAN_HPP_CONSTEXPR DescriptorMappingSourceHeapDataEXT( DescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourceHeapDataEXT( VkDescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourceHeapDataEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourceHeapDataEXT & operator=( DescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourceHeapDataEXT & operator=( VkDescriptorMappingSourceHeapDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT { heapOffset = heapOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceHeapDataEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT { pushOffset = 
pushOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceHeapDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceHeapDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceHeapDataEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorMappingSourceHeapDataEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, pushOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceHeapDataEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceHeapDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( pushOffset == rhs.pushOffset ); # endif } bool operator!=( DescriptorMappingSourceHeapDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t pushOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceHeapDataEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourceIndirectAddressEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceIndirectAddressEXT.html struct DescriptorMappingSourceIndirectAddressEXT { using NativeType = VkDescriptorMappingSourceIndirectAddressEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectAddressEXT( uint32_t pushOffset_ = {}, uint32_t addressOffset_ = {} ) VULKAN_HPP_NOEXCEPT : pushOffset{ pushOffset_ } , addressOffset{ addressOffset_ } { } 
VULKAN_HPP_CONSTEXPR DescriptorMappingSourceIndirectAddressEXT( DescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorMappingSourceIndirectAddressEXT( VkDescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorMappingSourceIndirectAddressEXT( *reinterpret_cast( &rhs ) ) { } DescriptorMappingSourceIndirectAddressEXT & operator=( DescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorMappingSourceIndirectAddressEXT & operator=( VkDescriptorMappingSourceIndirectAddressEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT & setPushOffset( uint32_t pushOffset_ ) & VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT && setPushOffset( uint32_t pushOffset_ ) && VULKAN_HPP_NOEXCEPT { pushOffset = pushOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT & setAddressOffset( uint32_t addressOffset_ ) & VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceIndirectAddressEXT && setAddressOffset( uint32_t addressOffset_ ) && VULKAN_HPP_NOEXCEPT { addressOffset = addressOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceIndirectAddressEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectAddressEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectAddressEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorMappingSourceIndirectAddressEXT *() 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( pushOffset, addressOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceIndirectAddressEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceIndirectAddressEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( pushOffset == rhs.pushOffset ) && ( addressOffset == rhs.addressOffset ); # endif } bool operator!=( DescriptorMappingSourceIndirectAddressEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t pushOffset = {}; uint32_t addressOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceIndirectAddressEXT; }; #endif // wrapper struct for struct VkDescriptorMappingSourceShaderRecordIndexEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorMappingSourceShaderRecordIndexEXT.html struct DescriptorMappingSourceShaderRecordIndexEXT { using NativeType = VkDescriptorMappingSourceShaderRecordIndexEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorMappingSourceShaderRecordIndexEXT( uint32_t heapOffset_ = {}, uint32_t shaderRecordOffset_ = {}, uint32_t heapIndexStride_ = {}, uint32_t heapArrayStride_ = {}, const SamplerCreateInfo * pEmbeddedSampler_ = {}, Bool32 useCombinedImageSamplerIndex_ = {}, uint32_t samplerHeapOffset_ = {}, uint32_t samplerShaderRecordOffset_ = {}, uint32_t samplerHeapIndexStride_ = {}, uint32_t samplerHeapArrayStride_ = {} ) VULKAN_HPP_NOEXCEPT : heapOffset{ heapOffset_ } , shaderRecordOffset{ shaderRecordOffset_ } , heapIndexStride{ heapIndexStride_ } , heapArrayStride{ heapArrayStride_ } , pEmbeddedSampler{ 
pEmbeddedSampler_ }  // continuation of the value constructor's init-list begun on the previous line
, useCombinedImageSamplerIndex{ useCombinedImageSamplerIndex_ }
, samplerHeapOffset{ samplerHeapOffset_ }
, samplerShaderRecordOffset{ samplerShaderRecordOffset_ }
, samplerHeapIndexStride{ samplerHeapIndexStride_ }
, samplerHeapArrayStride{ samplerHeapArrayStride_ }
{
}

VULKAN_HPP_CONSTEXPR DescriptorMappingSourceShaderRecordIndexEXT( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;

// Construct from the C struct by reinterpreting it as this wrapper.
// NOTE(review): reinterpret_cast template arguments are stripped throughout this copy of
// the generated header — extraction artifact; confirm against the generator output.
DescriptorMappingSourceShaderRecordIndexEXT( VkDescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  : DescriptorMappingSourceShaderRecordIndexEXT( *reinterpret_cast( &rhs ) )
{
}

DescriptorMappingSourceShaderRecordIndexEXT & operator=( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

// Assign from the C struct via reinterpretation.
DescriptorMappingSourceShaderRecordIndexEXT & operator=( VkDescriptorMappingSourceShaderRecordIndexEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
  *this = *reinterpret_cast( &rhs );
  return *this;
}

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
// Chainable setters: each member gets an lvalue overload returning *this and an rvalue
// overload returning std::move( *this ) for fluent use on temporaries.
VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapOffset( uint32_t heapOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  heapOffset = heapOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapOffset( uint32_t heapOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  heapOffset = heapOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setShaderRecordOffset( uint32_t shaderRecordOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  shaderRecordOffset = shaderRecordOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setShaderRecordOffset( uint32_t shaderRecordOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  shaderRecordOffset = shaderRecordOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapIndexStride( uint32_t heapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
{
  heapIndexStride = heapIndexStride_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapIndexStride( uint32_t heapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
{
  heapIndexStride = heapIndexStride_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setHeapArrayStride( uint32_t heapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
{
  heapArrayStride = heapArrayStride_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setHeapArrayStride( uint32_t heapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
{
  heapArrayStride = heapArrayStride_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) & VULKAN_HPP_NOEXCEPT
{
  pEmbeddedSampler = pEmbeddedSampler_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setPEmbeddedSampler( const SamplerCreateInfo * pEmbeddedSampler_ ) && VULKAN_HPP_NOEXCEPT
{
  pEmbeddedSampler = pEmbeddedSampler_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) & VULKAN_HPP_NOEXCEPT
{
  useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setUseCombinedImageSamplerIndex( Bool32 useCombinedImageSamplerIndex_ ) && VULKAN_HPP_NOEXCEPT
{
  useCombinedImageSamplerIndex = useCombinedImageSamplerIndex_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  samplerHeapOffset = samplerHeapOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapOffset( uint32_t samplerHeapOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  samplerHeapOffset = samplerHeapOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerShaderRecordOffset( uint32_t samplerShaderRecordOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  samplerShaderRecordOffset = samplerShaderRecordOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerShaderRecordOffset( uint32_t samplerShaderRecordOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  samplerShaderRecordOffset = samplerShaderRecordOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) & VULKAN_HPP_NOEXCEPT
{
  samplerHeapIndexStride = samplerHeapIndexStride_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapIndexStride( uint32_t samplerHeapIndexStride_ ) && VULKAN_HPP_NOEXCEPT
{
  samplerHeapIndexStride = samplerHeapIndexStride_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT & setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) & VULKAN_HPP_NOEXCEPT
{
  samplerHeapArrayStride = samplerHeapArrayStride_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceShaderRecordIndexEXT && setSamplerHeapArrayStride( uint32_t samplerHeapArrayStride_ ) && VULKAN_HPP_NOEXCEPT
{
  samplerHeapArrayStride = samplerHeapArrayStride_;
  return std::move( *this );
}
#endif /*VULKAN_HPP_NO_SETTERS*/

// Implicit conversions to the C struct (reference and pointer forms).
operator VkDescriptorMappingSourceShaderRecordIndexEXT const &() const VULKAN_HPP_NOEXCEPT
{
  return *reinterpret_cast( this );
}

operator VkDescriptorMappingSourceShaderRecordIndexEXT &() VULKAN_HPP_NOEXCEPT
{
  return *reinterpret_cast( this );
}

operator VkDescriptorMappingSourceShaderRecordIndexEXT const *() const VULKAN_HPP_NOEXCEPT
{
  return reinterpret_cast( this );
}

operator VkDescriptorMappingSourceShaderRecordIndexEXT *() VULKAN_HPP_NOEXCEPT
{
  return
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( heapOffset, shaderRecordOffset, heapIndexStride, heapArrayStride, pEmbeddedSampler, useCombinedImageSamplerIndex, samplerHeapOffset, samplerShaderRecordOffset, samplerHeapIndexStride, samplerHeapArrayStride ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorMappingSourceShaderRecordIndexEXT const & ) const = default; #else bool operator==( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( heapOffset == rhs.heapOffset ) && ( shaderRecordOffset == rhs.shaderRecordOffset ) && ( heapIndexStride == rhs.heapIndexStride ) && ( heapArrayStride == rhs.heapArrayStride ) && ( pEmbeddedSampler == rhs.pEmbeddedSampler ) && ( useCombinedImageSamplerIndex == rhs.useCombinedImageSamplerIndex ) && ( samplerHeapOffset == rhs.samplerHeapOffset ) && ( samplerShaderRecordOffset == rhs.samplerShaderRecordOffset ) && ( samplerHeapIndexStride == rhs.samplerHeapIndexStride ) && ( samplerHeapArrayStride == rhs.samplerHeapArrayStride ); # endif } bool operator!=( DescriptorMappingSourceShaderRecordIndexEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t heapOffset = {}; uint32_t shaderRecordOffset = {}; uint32_t heapIndexStride = {}; uint32_t heapArrayStride = {}; const SamplerCreateInfo * pEmbeddedSampler = {}; Bool32 useCombinedImageSamplerIndex = {}; uint32_t samplerHeapOffset = {}; uint32_t samplerShaderRecordOffset = {}; uint32_t samplerHeapIndexStride = {}; uint32_t samplerHeapArrayStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceShaderRecordIndexEXT; }; #endif union DescriptorMappingSourceDataEXT { using NativeType = VkDescriptorMappingSourceDataEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )  // continuation of the #if begun on the previous line
// One converting constructor per union alternative; the default-constructed state is
// the constantOffset member.
VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceConstantOffsetEXT constantOffset_ = {} ) : constantOffset( constantOffset_ ) {
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourcePushIndexEXT pushIndex_ ) : pushIndex( pushIndex_ ) {}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectIndexEXT indirectIndex_ ) : indirectIndex( indirectIndex_ ) {}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray_ ) : indirectIndexArray( indirectIndexArray_ ) {
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceHeapDataEXT heapData_ ) : heapData( heapData_ ) {}

// A bare uint32_t initializes the pushDataOffset alternative.
VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( uint32_t uint32_t_ ) : pushDataOffset( uint32_t_ ) {}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceIndirectAddressEXT indirectAddress_ ) : indirectAddress( indirectAddress_ ) {
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT( DescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex_ ) : shaderRecordIndex( shaderRecordIndex_ ) {
}
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS )
// Chainable setters, one pair (lvalue/rvalue qualified) per union alternative; writing
// through a setter makes that alternative the active member.
VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setConstantOffset( DescriptorMappingSourceConstantOffsetEXT const & constantOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  constantOffset = constantOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setConstantOffset( DescriptorMappingSourceConstantOffsetEXT const & constantOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  constantOffset = constantOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushIndex( DescriptorMappingSourcePushIndexEXT const & pushIndex_ ) & VULKAN_HPP_NOEXCEPT
{
  pushIndex = pushIndex_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushIndex( DescriptorMappingSourcePushIndexEXT const & pushIndex_ ) && VULKAN_HPP_NOEXCEPT
{
  pushIndex = pushIndex_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectIndex( DescriptorMappingSourceIndirectIndexEXT const & indirectIndex_ ) & VULKAN_HPP_NOEXCEPT
{
  indirectIndex = indirectIndex_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setIndirectIndex( DescriptorMappingSourceIndirectIndexEXT const & indirectIndex_ ) && VULKAN_HPP_NOEXCEPT
{
  indirectIndex = indirectIndex_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectIndexArray( DescriptorMappingSourceIndirectIndexArrayEXT const & indirectIndexArray_ ) & VULKAN_HPP_NOEXCEPT
{
  indirectIndexArray = indirectIndexArray_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setIndirectIndexArray( DescriptorMappingSourceIndirectIndexArrayEXT const & indirectIndexArray_ ) && VULKAN_HPP_NOEXCEPT
{
  indirectIndexArray = indirectIndexArray_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setHeapData( DescriptorMappingSourceHeapDataEXT const & heapData_ ) & VULKAN_HPP_NOEXCEPT
{
  heapData = heapData_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setHeapData( DescriptorMappingSourceHeapDataEXT const & heapData_ ) && VULKAN_HPP_NOEXCEPT
{
  heapData = heapData_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushDataOffset( uint32_t pushDataOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  pushDataOffset = pushDataOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushDataOffset( uint32_t pushDataOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  pushDataOffset = pushDataOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setPushAddressOffset( uint32_t pushAddressOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  pushAddressOffset = pushAddressOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setPushAddressOffset( uint32_t pushAddressOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  pushAddressOffset = pushAddressOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setIndirectAddress( DescriptorMappingSourceIndirectAddressEXT const & indirectAddress_ ) & VULKAN_HPP_NOEXCEPT
{
  indirectAddress = indirectAddress_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setIndirectAddress( DescriptorMappingSourceIndirectAddressEXT const & indirectAddress_ ) && VULKAN_HPP_NOEXCEPT
{
  indirectAddress = indirectAddress_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordIndex( DescriptorMappingSourceShaderRecordIndexEXT const & shaderRecordIndex_ ) & VULKAN_HPP_NOEXCEPT
{
  shaderRecordIndex = shaderRecordIndex_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordIndex( DescriptorMappingSourceShaderRecordIndexEXT const & shaderRecordIndex_ ) && VULKAN_HPP_NOEXCEPT
{
  shaderRecordIndex = shaderRecordIndex_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordDataOffset( uint32_t shaderRecordDataOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  shaderRecordDataOffset = shaderRecordDataOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordDataOffset( uint32_t shaderRecordDataOffset_ ) && VULKAN_HPP_NOEXCEPT
{
  shaderRecordDataOffset = shaderRecordDataOffset_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT & setShaderRecordAddressOffset( uint32_t shaderRecordAddressOffset_ ) & VULKAN_HPP_NOEXCEPT
{
  shaderRecordAddressOffset = shaderRecordAddressOffset_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorMappingSourceDataEXT && setShaderRecordAddressOffset(
uint32_t shaderRecordAddressOffset_ ) && VULKAN_HPP_NOEXCEPT { shaderRecordAddressOffset = shaderRecordAddressOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorMappingSourceDataEXT const &() const { return *reinterpret_cast( this ); } operator VkDescriptorMappingSourceDataEXT &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS DescriptorMappingSourceConstantOffsetEXT constantOffset; DescriptorMappingSourcePushIndexEXT pushIndex; DescriptorMappingSourceIndirectIndexEXT indirectIndex; DescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray; DescriptorMappingSourceHeapDataEXT heapData; uint32_t pushDataOffset; uint32_t pushAddressOffset; DescriptorMappingSourceIndirectAddressEXT indirectAddress; DescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex; uint32_t shaderRecordDataOffset; uint32_t shaderRecordAddressOffset; #else VkDescriptorMappingSourceConstantOffsetEXT constantOffset; VkDescriptorMappingSourcePushIndexEXT pushIndex; VkDescriptorMappingSourceIndirectIndexEXT indirectIndex; VkDescriptorMappingSourceIndirectIndexArrayEXT indirectIndexArray; VkDescriptorMappingSourceHeapDataEXT heapData; uint32_t pushDataOffset; uint32_t pushAddressOffset; VkDescriptorMappingSourceIndirectAddressEXT indirectAddress; VkDescriptorMappingSourceShaderRecordIndexEXT shaderRecordIndex; uint32_t shaderRecordDataOffset; uint32_t shaderRecordAddressOffset; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorMappingSourceDataEXT; }; #endif // wrapper struct for struct VkDescriptorPoolSize, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolSize.html struct DescriptorPoolSize { using NativeType = VkDescriptorPoolSize; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorType type_ = 
DescriptorType::eSampler, uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT : type{ type_ } , descriptorCount{ descriptorCount_ } { } VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolSize( *reinterpret_cast( &rhs ) ) {} DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setType( DescriptorType type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize && setType( DescriptorType type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize && setDescriptorCount( uint32_t descriptorCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorPoolSize const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorPoolSize const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorPoolSize *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( type, descriptorCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
DescriptorPoolSize const & ) const = default; #else bool operator==( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( type == rhs.type ) && ( descriptorCount == rhs.descriptorCount ); # endif } bool operator!=( DescriptorPoolSize const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DescriptorType type = DescriptorType::eSampler; uint32_t descriptorCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorPoolSize; }; #endif // wrapper struct for struct VkDescriptorPoolCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolCreateInfo.html struct DescriptorPoolCreateInfo { using NativeType = VkDescriptorPoolCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const DescriptorPoolSize * pPoolSizes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , maxSets{ maxSets_ } , poolSizeCount{ poolSizeCount_ } , pPoolSizes{ pPoolSizes_ } { } VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorPoolCreateInfo( DescriptorPoolCreateFlags flags_, uint32_t maxSets_, ArrayProxyNoTemporaries const & poolSizes_, const void * pNext_ = nullptr ) : pNext( pNext_ ), flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( 
static_cast( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )  // continuation of the ArrayProxy convenience constructor begun on the previous line
{
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

// Assign from the C struct by reinterpreting it as this wrapper.
// NOTE(review): reinterpret_cast / static_cast / std::tuple / CppType template arguments
// are stripped in this copy of the generated header — extraction artifact; confirm
// against the generator output.
DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
  *this = *reinterpret_cast( &rhs );
  return *this;
}

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
// Chainable setters (lvalue / rvalue qualified pairs).
VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT
{
  pNext = pNext_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT
{
  pNext = pNext_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setFlags( DescriptorPoolCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT
{
  flags = flags_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo && setFlags( DescriptorPoolCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT
{
  flags = flags_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) & VULKAN_HPP_NOEXCEPT
{
  maxSets = maxSets_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo && setMaxSets( uint32_t maxSets_ ) && VULKAN_HPP_NOEXCEPT
{
  maxSets = maxSets_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) & VULKAN_HPP_NOEXCEPT
{
  poolSizeCount = poolSizeCount_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo && setPoolSizeCount( uint32_t poolSizeCount_ ) && VULKAN_HPP_NOEXCEPT
{
  poolSizeCount = poolSizeCount_;
  return std::move( *this );
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & setPPoolSizes( const DescriptorPoolSize * pPoolSizes_ ) & VULKAN_HPP_NOEXCEPT
{
  pPoolSizes = pPoolSizes_;
  return *this;
}

VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo && setPPoolSizes( const DescriptorPoolSize * pPoolSizes_ ) && VULKAN_HPP_NOEXCEPT
{
  pPoolSizes = pPoolSizes_;
  return std::move( *this );
}

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
// Convenience setter: sets poolSizeCount and pPoolSizes together from an array proxy.
DescriptorPoolCreateInfo & setPoolSizes( ArrayProxyNoTemporaries const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
{
  poolSizeCount = static_cast( poolSizes_.size() );
  pPoolSizes    = poolSizes_.data();
  return *this;
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/

// Implicit conversions to the C struct (reference and pointer forms).
operator VkDescriptorPoolCreateInfo const &() const VULKAN_HPP_NOEXCEPT
{
  return *reinterpret_cast( this );
}

operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
{
  return *reinterpret_cast( this );
}

operator VkDescriptorPoolCreateInfo const *() const VULKAN_HPP_NOEXCEPT
{
  return reinterpret_cast( this );
}

operator VkDescriptorPoolCreateInfo *() VULKAN_HPP_NOEXCEPT
{
  return reinterpret_cast( this );
}

#if defined( VULKAN_HPP_USE_REFLECT )
// Reflection helper: ties the members in declaration order for generic comparison.
std::tuple reflect() const VULKAN_HPP_NOEXCEPT
{
  return std::tie( sType, pNext, flags, maxSets, poolSizeCount, pPoolSizes );
}
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
auto operator<=>( DescriptorPoolCreateInfo const & ) const = default;
#else
// Memberwise equality (or reflection-based when VULKAN_HPP_USE_REFLECT is on).
bool operator==( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
#  if defined( VULKAN_HPP_USE_REFLECT )
  return this->reflect() == rhs.reflect();
#  else
  return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( maxSets == rhs.maxSets ) && ( poolSizeCount == rhs.poolSizeCount ) && ( pPoolSizes == rhs.pPoolSizes );
#  endif
}

bool operator!=( DescriptorPoolCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
{
  return !operator==( rhs );
}
#endif

public:
StructureType              sType         = StructureType::eDescriptorPoolCreateInfo;
const void *               pNext         = {};
DescriptorPoolCreateFlags  flags         = {};
uint32_t                   maxSets       = {};
uint32_t                   poolSizeCount = {};
const DescriptorPoolSize * pPoolSizes    = {};
};

#if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType
{
  using Type = DescriptorPoolCreateInfo;
};
#endif template <> struct CppType { using Type = DescriptorPoolCreateInfo; }; // wrapper struct for struct VkDescriptorPoolInlineUniformBlockCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorPoolInlineUniformBlockCreateInfo.html struct DescriptorPoolInlineUniformBlockCreateInfo { using NativeType = VkDescriptorPoolInlineUniformBlockCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ } { } VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorPoolInlineUniformBlockCreateInfo( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorPoolInlineUniformBlockCreateInfo( *reinterpret_cast( &rhs ) ) { } DescriptorPoolInlineUniformBlockCreateInfo & operator=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorPoolInlineUniformBlockCreateInfo & operator=( VkDescriptorPoolInlineUniformBlockCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) & VULKAN_HPP_NOEXCEPT { maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfo && setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) && VULKAN_HPP_NOEXCEPT { maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorPoolInlineUniformBlockCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorPoolInlineUniformBlockCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorPoolInlineUniformBlockCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorPoolInlineUniformBlockCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxInlineUniformBlockBindings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfo const & ) const = default; #else bool operator==( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings ); # endif } bool operator!=( DescriptorPoolInlineUniformBlockCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfo; const void * pNext = {}; uint32_t maxInlineUniformBlockBindings = {}; 
}; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorPoolInlineUniformBlockCreateInfo; }; #endif template <> struct CppType { using Type = DescriptorPoolInlineUniformBlockCreateInfo; }; using DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo; // wrapper struct for struct VkDescriptorSetAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAllocateInfo.html struct DescriptorSetAllocateInfo { using NativeType = VkDescriptorSetAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorPool{ descriptorPool_ } , descriptorSetCount{ descriptorSetCount_ } , pSetLayouts{ pSetLayouts_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetAllocateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetAllocateInfo( DescriptorPool descriptorPool_, ArrayProxyNoTemporaries const & setLayouts_, const void * pNext_ = nullptr ) : pNext( pNext_ ), descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetAllocateInfo & operator=( 
VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorPool( DescriptorPool descriptorPool_ ) & VULKAN_HPP_NOEXCEPT { descriptorPool = descriptorPool_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo && setDescriptorPool( DescriptorPool descriptorPool_ ) && VULKAN_HPP_NOEXCEPT { descriptorPool = descriptorPool_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo && setDescriptorSetCount( uint32_t descriptorSetCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) & VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo && setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) && VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetAllocateInfo & setSetLayouts( ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast( setLayouts_.size() ); pSetLayouts = setLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkDescriptorSetAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorPool, descriptorSetCount, pSetLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetAllocateInfo const & ) const = default; #else bool operator==( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPool == rhs.descriptorPool ) && ( descriptorSetCount == rhs.descriptorSetCount ) && ( pSetLayouts == rhs.pSetLayouts ); # endif } bool operator!=( DescriptorSetAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetAllocateInfo; const void * pNext = {}; DescriptorPool descriptorPool = {}; uint32_t descriptorSetCount = {}; const DescriptorSetLayout * pSetLayouts = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetAllocateInfo; }; #endif template <> struct CppType { using Type = DescriptorSetAllocateInfo; }; // wrapper struct for struct VkDescriptorSetAndBindingMappingEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetAndBindingMappingEXT.html struct DescriptorSetAndBindingMappingEXT { using NativeType = VkDescriptorSetAndBindingMappingEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDescriptorSetAndBindingMappingEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT( uint32_t descriptorSet_ = {}, uint32_t firstBinding_ = {}, uint32_t bindingCount_ = {}, SpirvResourceTypeFlagsEXT resourceMask_ = {}, DescriptorMappingSourceEXT source_ = DescriptorMappingSourceEXT::eHeapWithConstantOffset, DescriptorMappingSourceDataEXT sourceData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorSet{ descriptorSet_ } , firstBinding{ firstBinding_ } , bindingCount{ bindingCount_ } , resourceMask{ resourceMask_ } , source{ source_ } , sourceData{ sourceData_ } { } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT( DescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetAndBindingMappingEXT( VkDescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetAndBindingMappingEXT( *reinterpret_cast( &rhs ) ) { } DescriptorSetAndBindingMappingEXT & operator=( DescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetAndBindingMappingEXT & operator=( VkDescriptorSetAndBindingMappingEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setDescriptorSet( uint32_t descriptorSet_ ) & VULKAN_HPP_NOEXCEPT { descriptorSet = descriptorSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT 
&& setDescriptorSet( uint32_t descriptorSet_ ) && VULKAN_HPP_NOEXCEPT { descriptorSet = descriptorSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setFirstBinding( uint32_t firstBinding_ ) & VULKAN_HPP_NOEXCEPT { firstBinding = firstBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setFirstBinding( uint32_t firstBinding_ ) && VULKAN_HPP_NOEXCEPT { firstBinding = firstBinding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setBindingCount( uint32_t bindingCount_ ) & VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setBindingCount( uint32_t bindingCount_ ) && VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setResourceMask( SpirvResourceTypeFlagsEXT resourceMask_ ) & VULKAN_HPP_NOEXCEPT { resourceMask = resourceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setResourceMask( SpirvResourceTypeFlagsEXT resourceMask_ ) && VULKAN_HPP_NOEXCEPT { resourceMask = resourceMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setSource( DescriptorMappingSourceEXT source_ ) & VULKAN_HPP_NOEXCEPT { source = source_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setSource( DescriptorMappingSourceEXT source_ ) && VULKAN_HPP_NOEXCEPT { source = source_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT & setSourceData( DescriptorMappingSourceDataEXT const & sourceData_ ) & VULKAN_HPP_NOEXCEPT { sourceData = sourceData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetAndBindingMappingEXT && setSourceData( DescriptorMappingSourceDataEXT const & sourceData_ ) && VULKAN_HPP_NOEXCEPT { sourceData = sourceData_; return std::move( *this ); } 
#endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetAndBindingMappingEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetAndBindingMappingEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetAndBindingMappingEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetAndBindingMappingEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorSet, firstBinding, bindingCount, resourceMask, source, sourceData ); } #endif public: StructureType sType = StructureType::eDescriptorSetAndBindingMappingEXT; const void * pNext = {}; uint32_t descriptorSet = {}; uint32_t firstBinding = {}; uint32_t bindingCount = {}; SpirvResourceTypeFlagsEXT resourceMask = {}; DescriptorMappingSourceEXT source = DescriptorMappingSourceEXT::eHeapWithConstantOffset; DescriptorMappingSourceDataEXT sourceData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetAndBindingMappingEXT; }; #endif template <> struct CppType { using Type = DescriptorSetAndBindingMappingEXT; }; // wrapper struct for struct VkDescriptorSetBindingReferenceVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetBindingReferenceVALVE.html struct DescriptorSetBindingReferenceVALVE { using NativeType = VkDescriptorSetBindingReferenceVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetBindingReferenceVALVE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , 
descriptorSetLayout{ descriptorSetLayout_ } , binding{ binding_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetBindingReferenceVALVE( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetBindingReferenceVALVE( *reinterpret_cast( &rhs ) ) { } DescriptorSetBindingReferenceVALVE & operator=( DescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetBindingReferenceVALVE & operator=( VkDescriptorSetBindingReferenceVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE && setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) && VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE & setBinding( uint32_t binding_ ) & VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetBindingReferenceVALVE && setBinding( uint32_t binding_ ) && VULKAN_HPP_NOEXCEPT { binding = binding_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetBindingReferenceVALVE const &() const VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast( this ); } operator VkDescriptorSetBindingReferenceVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetBindingReferenceVALVE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetBindingReferenceVALVE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorSetLayout, binding ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetBindingReferenceVALVE const & ) const = default; #else bool operator==( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( binding == rhs.binding ); # endif } bool operator!=( DescriptorSetBindingReferenceVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetBindingReferenceVALVE; const void * pNext = {}; DescriptorSetLayout descriptorSetLayout = {}; uint32_t binding = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetBindingReferenceVALVE; }; #endif template <> struct CppType { using Type = DescriptorSetBindingReferenceVALVE; }; // wrapper struct for struct VkDescriptorSetLayoutBinding, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBinding.html struct DescriptorSetLayoutBinding { using NativeType = VkDescriptorSetLayoutBinding; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {}, DescriptorType descriptorType_ = DescriptorType::eSampler, uint32_t descriptorCount_ = 
{}, ShaderStageFlags stageFlags_ = {}, const Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT : binding{ binding_ } , descriptorType{ descriptorType_ } , descriptorCount{ descriptorCount_ } , stageFlags{ stageFlags_ } , pImmutableSamplers{ pImmutableSamplers_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutBinding( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutBinding( uint32_t binding_, DescriptorType descriptorType_, ShaderStageFlags stageFlags_, ArrayProxyNoTemporaries const & immutableSamplers_ ) : binding( binding_ ) , descriptorType( descriptorType_ ) , descriptorCount( static_cast( immutableSamplers_.size() ) ) , stageFlags( stageFlags_ ) , pImmutableSamplers( immutableSamplers_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) & VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding && setBinding( uint32_t binding_ ) && VULKAN_HPP_NOEXCEPT { binding = binding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorType( DescriptorType descriptorType_ ) & VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding && setDescriptorType( DescriptorType descriptorType_ ) && 
VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding && setDescriptorCount( uint32_t descriptorCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setStageFlags( ShaderStageFlags stageFlags_ ) & VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding && setStageFlags( ShaderStageFlags stageFlags_ ) && VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & setPImmutableSamplers( const Sampler * pImmutableSamplers_ ) & VULKAN_HPP_NOEXCEPT { pImmutableSamplers = pImmutableSamplers_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding && setPImmutableSamplers( const Sampler * pImmutableSamplers_ ) && VULKAN_HPP_NOEXCEPT { pImmutableSamplers = pImmutableSamplers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutBinding & setImmutableSamplers( ArrayProxyNoTemporaries const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT { descriptorCount = static_cast( immutableSamplers_.size() ); pImmutableSamplers = immutableSamplers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetLayoutBinding const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBinding const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetLayoutBinding *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( 
this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( binding, descriptorType, descriptorCount, stageFlags, pImmutableSamplers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetLayoutBinding const & ) const = default; #else bool operator==( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( binding == rhs.binding ) && ( descriptorType == rhs.descriptorType ) && ( descriptorCount == rhs.descriptorCount ) && ( stageFlags == rhs.stageFlags ) && ( pImmutableSamplers == rhs.pImmutableSamplers ); # endif } bool operator!=( DescriptorSetLayoutBinding const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t binding = {}; DescriptorType descriptorType = DescriptorType::eSampler; uint32_t descriptorCount = {}; ShaderStageFlags stageFlags = {}; const Sampler * pImmutableSamplers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetLayoutBinding; }; #endif // wrapper struct for struct VkDescriptorSetLayoutBindingFlagsCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfo.html struct DescriptorSetLayoutBindingFlagsCreateInfo { using NativeType = VkDescriptorSetLayoutBindingFlagsCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, const DescriptorBindingFlags * pBindingFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , bindingCount{ bindingCount_ } , pBindingFlags{ pBindingFlags_ } 
{ } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutBindingFlagsCreateInfo( ArrayProxyNoTemporaries const & bindingFlags_, const void * pNext_ = nullptr ) : pNext( pNext_ ), bindingCount( static_cast( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) & VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo && setBindingCount( uint32_t bindingCount_ ) && VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const DescriptorBindingFlags * pBindingFlags_ ) & VULKAN_HPP_NOEXCEPT { 
pBindingFlags = pBindingFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo && setPBindingFlags( const DescriptorBindingFlags * pBindingFlags_ ) && VULKAN_HPP_NOEXCEPT { pBindingFlags = pBindingFlags_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( ArrayProxyNoTemporaries const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT { bindingCount = static_cast( bindingFlags_.size() ); pBindingFlags = bindingFlags_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetLayoutBindingFlagsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutBindingFlagsCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetLayoutBindingFlagsCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, bindingCount, pBindingFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const & ) const = default; #else bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bindingCount == rhs.bindingCount ) && ( pBindingFlags == rhs.pBindingFlags ); # endif } bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo; const void * pNext = 
{}; uint32_t bindingCount = {}; const DescriptorBindingFlags * pBindingFlags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetLayoutBindingFlagsCreateInfo; }; #endif template <> struct CppType { using Type = DescriptorSetLayoutBindingFlagsCreateInfo; }; using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo; // wrapper struct for struct VkDescriptorSetLayoutCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutCreateInfo.html struct DescriptorSetLayoutCreateInfo { using NativeType = VkDescriptorSetLayoutCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const DescriptorSetLayoutBinding * pBindings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , bindingCount{ bindingCount_ } , pBindings{ pBindings_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateFlags flags_, ArrayProxyNoTemporaries const & bindings_, const void * pNext_ = nullptr ) : pNext( pNext_ ), flags( flags_ ), bindingCount( static_cast( bindings_.size() ) ), pBindings( bindings_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setFlags( DescriptorSetLayoutCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo && setFlags( DescriptorSetLayoutCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) & VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo && setBindingCount( uint32_t bindingCount_ ) && VULKAN_HPP_NOEXCEPT { bindingCount = bindingCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & setPBindings( const DescriptorSetLayoutBinding * pBindings_ ) & VULKAN_HPP_NOEXCEPT { pBindings = pBindings_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo && setPBindings( const DescriptorSetLayoutBinding * pBindings_ ) && VULKAN_HPP_NOEXCEPT { pBindings = pBindings_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetLayoutCreateInfo & setBindings( ArrayProxyNoTemporaries const & bindings_ ) VULKAN_HPP_NOEXCEPT { bindingCount = static_cast( bindings_.size() ); pBindings = bindings_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkDescriptorSetLayoutCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetLayoutCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, bindingCount, pBindings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetLayoutCreateInfo const & ) const = default; #else bool operator==( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( bindingCount == rhs.bindingCount ) && ( pBindings == rhs.pBindings ); # endif } bool operator!=( DescriptorSetLayoutCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo; const void * pNext = {}; DescriptorSetLayoutCreateFlags flags = {}; uint32_t bindingCount = {}; const DescriptorSetLayoutBinding * pBindings = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetLayoutCreateInfo; }; #endif template <> struct CppType { using Type = DescriptorSetLayoutCreateInfo; }; // wrapper struct for struct VkDescriptorSetLayoutHostMappingInfoVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutHostMappingInfoVALVE.html struct DescriptorSetLayoutHostMappingInfoVALVE { using NativeType = VkDescriptorSetLayoutHostMappingInfoVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorOffset{ descriptorOffset_ } , descriptorSize{ descriptorSize_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutHostMappingInfoVALVE( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutHostMappingInfoVALVE( *reinterpret_cast( &rhs ) ) { } DescriptorSetLayoutHostMappingInfoVALVE & operator=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetLayoutHostMappingInfoVALVE & operator=( VkDescriptorSetLayoutHostMappingInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & setDescriptorOffset( size_t descriptorOffset_ ) & VULKAN_HPP_NOEXCEPT { descriptorOffset = descriptorOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE && setDescriptorOffset( size_t descriptorOffset_ ) && VULKAN_HPP_NOEXCEPT { descriptorOffset = descriptorOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE & 
setDescriptorSize( uint32_t descriptorSize_ ) & VULKAN_HPP_NOEXCEPT { descriptorSize = descriptorSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutHostMappingInfoVALVE && setDescriptorSize( uint32_t descriptorSize_ ) && VULKAN_HPP_NOEXCEPT { descriptorSize = descriptorSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetLayoutHostMappingInfoVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutHostMappingInfoVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutHostMappingInfoVALVE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetLayoutHostMappingInfoVALVE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorOffset, descriptorSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetLayoutHostMappingInfoVALVE const & ) const = default; #else bool operator==( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorOffset == rhs.descriptorOffset ) && ( descriptorSize == rhs.descriptorSize ); # endif } bool operator!=( DescriptorSetLayoutHostMappingInfoVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetLayoutHostMappingInfoVALVE; void * pNext = {}; size_t descriptorOffset = {}; uint32_t descriptorSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetLayoutHostMappingInfoVALVE; }; #endif template <> struct CppType { using Type = DescriptorSetLayoutHostMappingInfoVALVE; }; // 
wrapper struct for struct VkDescriptorSetLayoutSupport, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetLayoutSupport.html struct DescriptorSetLayoutSupport { using NativeType = VkDescriptorSetLayoutSupport; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , supported{ supported_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetLayoutSupport( *reinterpret_cast( &rhs ) ) { } DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDescriptorSetLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetLayoutSupport const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetLayoutSupport *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, supported ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetLayoutSupport const & ) const = default; #else bool operator==( DescriptorSetLayoutSupport const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supported == rhs.supported ); # endif } bool operator!=( DescriptorSetLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetLayoutSupport; void * pNext = {}; Bool32 supported = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetLayoutSupport; }; #endif template <> struct CppType { using Type = DescriptorSetLayoutSupport; }; using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; // wrapper struct for struct VkDescriptorSetVariableDescriptorCountAllocateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfo.html struct DescriptorSetVariableDescriptorCountAllocateInfo { using NativeType = VkDescriptorSetVariableDescriptorCountAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, const uint32_t * pDescriptorCounts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorSetCount{ descriptorSetCount_ } , pDescriptorCounts{ pDescriptorCounts_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast( &rhs ) ) { } # if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetVariableDescriptorCountAllocateInfo( ArrayProxyNoTemporaries const & descriptorCounts_, const void * pNext_ = nullptr ) : pNext( pNext_ ), descriptorSetCount( static_cast( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo && setDescriptorSetCount( uint32_t descriptorSetCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) & VULKAN_HPP_NOEXCEPT { pDescriptorCounts = pDescriptorCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo && setPDescriptorCounts( const uint32_t * pDescriptorCounts_ ) && VULKAN_HPP_NOEXCEPT { 
pDescriptorCounts = pDescriptorCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( ArrayProxyNoTemporaries const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast( descriptorCounts_.size() ); pDescriptorCounts = descriptorCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorSetVariableDescriptorCountAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorSetCount, pDescriptorCounts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const & ) const = default; #else bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetCount == rhs.descriptorSetCount ) && ( pDescriptorCounts == rhs.pDescriptorCounts ); # endif } bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo; const void * pNext = {}; uint32_t descriptorSetCount = {}; const uint32_t * pDescriptorCounts = {}; 
}; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountAllocateInfo; }; #endif template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountAllocateInfo; }; using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo; // wrapper struct for struct VkDescriptorSetVariableDescriptorCountLayoutSupport, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupport.html struct DescriptorSetVariableDescriptorCountLayoutSupport { using NativeType = VkDescriptorSetVariableDescriptorCountLayoutSupport; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxVariableDescriptorCount{ maxVariableDescriptorCount_ } { } VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast( &rhs ) ) { } DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator 
VkDescriptorSetVariableDescriptorCountLayoutSupport const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountLayoutSupport const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorSetVariableDescriptorCountLayoutSupport *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxVariableDescriptorCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const & ) const = default; #else bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount ); # endif } bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport; void * pNext = {}; uint32_t maxVariableDescriptorCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountLayoutSupport; }; #endif template <> struct CppType { using Type = DescriptorSetVariableDescriptorCountLayoutSupport; }; using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport; // wrapper struct for struct VkDescriptorUpdateTemplateEntry, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateEntry.html struct DescriptorUpdateTemplateEntry { 
using NativeType = VkDescriptorUpdateTemplateEntry; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, DescriptorType descriptorType_ = DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : dstBinding{ dstBinding_ } , dstArrayElement{ dstArrayElement_ } , descriptorCount{ descriptorCount_ } , descriptorType{ descriptorType_ } , offset{ offset_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorUpdateTemplateEntry( *reinterpret_cast( &rhs ) ) { } DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) & VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setDstBinding( uint32_t dstBinding_ ) && VULKAN_HPP_NOEXCEPT { dstBinding = dstBinding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) & VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setDstArrayElement( uint32_t dstArrayElement_ ) && VULKAN_HPP_NOEXCEPT { dstArrayElement = dstArrayElement_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setDescriptorCount( uint32_t descriptorCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorCount = descriptorCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setDescriptorType( DescriptorType descriptorType_ ) & VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setDescriptorType( DescriptorType descriptorType_ ) && VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setOffset( size_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry && setStride( size_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorUpdateTemplateEntry const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateEntry const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateEntry *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( dstBinding, dstArrayElement, descriptorCount, descriptorType, offset, stride 
); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorUpdateTemplateEntry const & ) const = default; #else bool operator==( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( dstBinding == rhs.dstBinding ) && ( dstArrayElement == rhs.dstArrayElement ) && ( descriptorCount == rhs.descriptorCount ) && ( descriptorType == rhs.descriptorType ) && ( offset == rhs.offset ) && ( stride == rhs.stride ); # endif } bool operator!=( DescriptorUpdateTemplateEntry const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t dstBinding = {}; uint32_t dstArrayElement = {}; uint32_t descriptorCount = {}; DescriptorType descriptorType = DescriptorType::eSampler; size_t offset = {}; size_t stride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorUpdateTemplateEntry; }; #endif using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; // wrapper struct for struct VkDescriptorUpdateTemplateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDescriptorUpdateTemplateCreateInfo.html struct DescriptorUpdateTemplateCreateInfo { using NativeType = VkDescriptorUpdateTemplateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ = {}, DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet, DescriptorSetLayout descriptorSetLayout_ = {}, PipelineBindPoint pipelineBindPoint_ = 
PipelineBindPoint::eGraphics, PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ } , pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ } , templateType{ templateType_ } , descriptorSetLayout{ descriptorSetLayout_ } , pipelineBindPoint{ pipelineBindPoint_ } , pipelineLayout{ pipelineLayout_ } , set{ set_ } { } VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateFlags flags_, ArrayProxyNoTemporaries const & descriptorUpdateEntries_, DescriptorUpdateTemplateType templateType_ = DescriptorUpdateTemplateType::eDescriptorSet, DescriptorSetLayout descriptorSetLayout_ = {}, PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , descriptorUpdateEntryCount( static_cast( descriptorUpdateEntries_.size() ) ) , pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ) , templateType( templateType_ ) , descriptorSetLayout( descriptorSetLayout_ ) , pipelineBindPoint( pipelineBindPoint_ ) , pipelineLayout( pipelineLayout_ ) , set( set_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } 
#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setFlags( DescriptorUpdateTemplateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setFlags( DescriptorUpdateTemplateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = descriptorUpdateEntryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = descriptorUpdateEntryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) & VULKAN_HPP_NOEXCEPT { pDescriptorUpdateEntries = pDescriptorUpdateEntries_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setPDescriptorUpdateEntries( const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries_ ) && VULKAN_HPP_NOEXCEPT { pDescriptorUpdateEntries = pDescriptorUpdateEntries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( ArrayProxyNoTemporaries const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT { descriptorUpdateEntryCount = static_cast( 
descriptorUpdateEntries_.size() ); pDescriptorUpdateEntries = descriptorUpdateEntries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setTemplateType( DescriptorUpdateTemplateType templateType_ ) & VULKAN_HPP_NOEXCEPT { templateType = templateType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setTemplateType( DescriptorUpdateTemplateType templateType_ ) && VULKAN_HPP_NOEXCEPT { templateType = templateType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setDescriptorSetLayout( DescriptorSetLayout descriptorSetLayout_ ) && VULKAN_HPP_NOEXCEPT { descriptorSetLayout = descriptorSetLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) & VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) && VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setPipelineLayout( PipelineLayout pipelineLayout_ ) & VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo && setPipelineLayout( PipelineLayout pipelineLayout_ ) && VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) & VULKAN_HPP_NOEXCEPT { set = set_; return *this; } VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo 
&& setSet( uint32_t set_ ) && VULKAN_HPP_NOEXCEPT { set = set_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDescriptorUpdateTemplateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDescriptorUpdateTemplateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, descriptorUpdateEntryCount, pDescriptorUpdateEntries, templateType, descriptorSetLayout, pipelineBindPoint, pipelineLayout, set ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DescriptorUpdateTemplateCreateInfo const & ) const = default; #else bool operator==( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount ) && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries ) && ( templateType == rhs.templateType ) && ( descriptorSetLayout == rhs.descriptorSetLayout ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipelineLayout == rhs.pipelineLayout ) && ( set == rhs.set ); # endif } bool operator!=( DescriptorUpdateTemplateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo; const void * pNext = {}; DescriptorUpdateTemplateCreateFlags flags = {}; uint32_t descriptorUpdateEntryCount = {}; const DescriptorUpdateTemplateEntry * pDescriptorUpdateEntries = {}; 
DescriptorUpdateTemplateType templateType = DescriptorUpdateTemplateType::eDescriptorSet; DescriptorSetLayout descriptorSetLayout = {}; PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics; PipelineLayout pipelineLayout = {}; uint32_t set = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DescriptorUpdateTemplateCreateInfo; }; #endif template <> struct CppType { using Type = DescriptorUpdateTemplateCreateInfo; }; using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; // wrapper struct for struct VkDeviceAddressBindingCallbackDataEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceAddressBindingCallbackDataEXT.html struct DeviceAddressBindingCallbackDataEXT { using NativeType = VkDeviceAddressBindingCallbackDataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceAddressBindingCallbackDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingFlagsEXT flags_ = {}, DeviceAddress baseAddress_ = {}, DeviceSize size_ = {}, DeviceAddressBindingTypeEXT bindingType_ = DeviceAddressBindingTypeEXT::eBind, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , baseAddress{ baseAddress_ } , size{ size_ } , bindingType{ bindingType_ } { } VULKAN_HPP_CONSTEXPR DeviceAddressBindingCallbackDataEXT( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceAddressBindingCallbackDataEXT( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceAddressBindingCallbackDataEXT( *reinterpret_cast( &rhs ) ) { } DeviceAddressBindingCallbackDataEXT & operator=( DeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceAddressBindingCallbackDataEXT 
& operator=( VkDeviceAddressBindingCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setFlags( DeviceAddressBindingFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT && setFlags( DeviceAddressBindingFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBaseAddress( DeviceAddress baseAddress_ ) & VULKAN_HPP_NOEXCEPT { baseAddress = baseAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT && setBaseAddress( DeviceAddress baseAddress_ ) && VULKAN_HPP_NOEXCEPT { baseAddress = baseAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT & setBindingType( DeviceAddressBindingTypeEXT bindingType_ ) & VULKAN_HPP_NOEXCEPT { bindingType = bindingType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceAddressBindingCallbackDataEXT && setBindingType( DeviceAddressBindingTypeEXT bindingType_ ) && VULKAN_HPP_NOEXCEPT { bindingType = bindingType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkDeviceAddressBindingCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceAddressBindingCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceAddressBindingCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceAddressBindingCallbackDataEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, baseAddress, size, bindingType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceAddressBindingCallbackDataEXT const & ) const = default; #else bool operator==( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( baseAddress == rhs.baseAddress ) && ( size == rhs.size ) && ( bindingType == rhs.bindingType ); # endif } bool operator!=( DeviceAddressBindingCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceAddressBindingCallbackDataEXT; void * pNext = {}; DeviceAddressBindingFlagsEXT flags = {}; DeviceAddress baseAddress = {}; DeviceSize size = {}; DeviceAddressBindingTypeEXT bindingType = DeviceAddressBindingTypeEXT::eBind; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceAddressBindingCallbackDataEXT; }; #endif template <> struct CppType { using Type = DeviceAddressBindingCallbackDataEXT; }; // wrapper struct for struct VkDeviceBufferMemoryRequirements, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceBufferMemoryRequirements.html struct DeviceBufferMemoryRequirements { using NativeType = VkDeviceBufferMemoryRequirements; 
static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceBufferMemoryRequirements; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const BufferCreateInfo * pCreateInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pCreateInfo{ pCreateInfo_ } { } VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceBufferMemoryRequirements( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceBufferMemoryRequirements( *reinterpret_cast( &rhs ) ) { } DeviceBufferMemoryRequirements & operator=( DeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceBufferMemoryRequirements & operator=( VkDeviceBufferMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements & setPCreateInfo( const BufferCreateInfo * pCreateInfo_ ) & VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceBufferMemoryRequirements && setPCreateInfo( const BufferCreateInfo * pCreateInfo_ ) && VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceBufferMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDeviceBufferMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceBufferMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceBufferMemoryRequirements *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pCreateInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceBufferMemoryRequirements const & ) const = default; #else bool operator==( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); # endif } bool operator!=( DeviceBufferMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceBufferMemoryRequirements; const void * pNext = {}; const BufferCreateInfo * pCreateInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceBufferMemoryRequirements; }; #endif template <> struct CppType { using Type = DeviceBufferMemoryRequirements; }; using DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements; // wrapper struct for struct VkDeviceQueueCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueCreateInfo.html struct DeviceQueueCreateInfo { using NativeType = VkDeviceQueueCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t 
queueCount_ = {}, const float * pQueuePriorities_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , queueFamilyIndex{ queueFamilyIndex_ } , queueCount{ queueCount_ } , pQueuePriorities{ pQueuePriorities_ } { } VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceQueueCreateInfo( DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, ArrayProxyNoTemporaries const & queuePriorities_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , queueFamilyIndex( queueFamilyIndex_ ) , queueCount( static_cast( queuePriorities_.size() ) ) , pQueuePriorities( queuePriorities_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setFlags( DeviceQueueCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo && setFlags( DeviceQueueCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t 
queueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo && setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) & VULKAN_HPP_NOEXCEPT { queueCount = queueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo && setQueueCount( uint32_t queueCount_ ) && VULKAN_HPP_NOEXCEPT { queueCount = queueCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & setPQueuePriorities( const float * pQueuePriorities_ ) & VULKAN_HPP_NOEXCEPT { pQueuePriorities = pQueuePriorities_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo && setPQueuePriorities( const float * pQueuePriorities_ ) && VULKAN_HPP_NOEXCEPT { pQueuePriorities = pQueuePriorities_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceQueueCreateInfo & setQueuePriorities( ArrayProxyNoTemporaries const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT { queueCount = static_cast( queuePriorities_.size() ); pQueuePriorities = queuePriorities_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceQueueCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceQueueCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, queueFamilyIndex, queueCount, pQueuePriorities ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
DeviceQueueCreateInfo const & ) const = default; #else bool operator==( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueCount == rhs.queueCount ) && ( pQueuePriorities == rhs.pQueuePriorities ); # endif } bool operator!=( DeviceQueueCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceQueueCreateInfo; const void * pNext = {}; DeviceQueueCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; uint32_t queueCount = {}; const float * pQueuePriorities = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceQueueCreateInfo; }; #endif template <> struct CppType { using Type = DeviceQueueCreateInfo; }; // wrapper struct for struct VkPhysicalDeviceFeatures, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures.html struct PhysicalDeviceFeatures { using NativeType = VkPhysicalDeviceFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( Bool32 robustBufferAccess_ = {}, Bool32 fullDrawIndexUint32_ = {}, Bool32 imageCubeArray_ = {}, Bool32 independentBlend_ = {}, Bool32 geometryShader_ = {}, Bool32 tessellationShader_ = {}, Bool32 sampleRateShading_ = {}, Bool32 dualSrcBlend_ = {}, Bool32 logicOp_ = {}, Bool32 multiDrawIndirect_ = {}, Bool32 drawIndirectFirstInstance_ = {}, Bool32 depthClamp_ = {}, Bool32 depthBiasClamp_ = {}, Bool32 fillModeNonSolid_ = {}, Bool32 depthBounds_ = {}, Bool32 wideLines_ = {}, Bool32 largePoints_ = {}, Bool32 alphaToOne_ = {}, Bool32 multiViewport_ = {}, Bool32 samplerAnisotropy_ = {}, Bool32 textureCompressionETC2_ = {}, Bool32 textureCompressionASTC_LDR_ = {}, 
Bool32 textureCompressionBC_ = {}, Bool32 occlusionQueryPrecise_ = {}, Bool32 pipelineStatisticsQuery_ = {}, Bool32 vertexPipelineStoresAndAtomics_ = {}, Bool32 fragmentStoresAndAtomics_ = {}, Bool32 shaderTessellationAndGeometryPointSize_ = {}, Bool32 shaderImageGatherExtended_ = {}, Bool32 shaderStorageImageExtendedFormats_ = {}, Bool32 shaderStorageImageMultisample_ = {}, Bool32 shaderStorageImageReadWithoutFormat_ = {}, Bool32 shaderStorageImageWriteWithoutFormat_ = {}, Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, Bool32 shaderClipDistance_ = {}, Bool32 shaderCullDistance_ = {}, Bool32 shaderFloat64_ = {}, Bool32 shaderInt64_ = {}, Bool32 shaderInt16_ = {}, Bool32 shaderResourceResidency_ = {}, Bool32 shaderResourceMinLod_ = {}, Bool32 sparseBinding_ = {}, Bool32 sparseResidencyBuffer_ = {}, Bool32 sparseResidencyImage2D_ = {}, Bool32 sparseResidencyImage3D_ = {}, Bool32 sparseResidency2Samples_ = {}, Bool32 sparseResidency4Samples_ = {}, Bool32 sparseResidency8Samples_ = {}, Bool32 sparseResidency16Samples_ = {}, Bool32 sparseResidencyAliased_ = {}, Bool32 variableMultisampleRate_ = {}, Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT : robustBufferAccess{ robustBufferAccess_ } , fullDrawIndexUint32{ fullDrawIndexUint32_ } , imageCubeArray{ imageCubeArray_ } , independentBlend{ independentBlend_ } , geometryShader{ geometryShader_ } , tessellationShader{ tessellationShader_ } , sampleRateShading{ sampleRateShading_ } , dualSrcBlend{ dualSrcBlend_ } , logicOp{ logicOp_ } , multiDrawIndirect{ multiDrawIndirect_ } , drawIndirectFirstInstance{ drawIndirectFirstInstance_ } , depthClamp{ depthClamp_ } , depthBiasClamp{ depthBiasClamp_ } , fillModeNonSolid{ fillModeNonSolid_ } , depthBounds{ depthBounds_ } , wideLines{ wideLines_ } , largePoints{ largePoints_ } , alphaToOne{ alphaToOne_ } , 
multiViewport{ multiViewport_ } , samplerAnisotropy{ samplerAnisotropy_ } , textureCompressionETC2{ textureCompressionETC2_ } , textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ } , textureCompressionBC{ textureCompressionBC_ } , occlusionQueryPrecise{ occlusionQueryPrecise_ } , pipelineStatisticsQuery{ pipelineStatisticsQuery_ } , vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ } , fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ } , shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ } , shaderImageGatherExtended{ shaderImageGatherExtended_ } , shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ } , shaderStorageImageMultisample{ shaderStorageImageMultisample_ } , shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ } , shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ } , shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ } , shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ } , shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ } , shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ } , shaderClipDistance{ shaderClipDistance_ } , shaderCullDistance{ shaderCullDistance_ } , shaderFloat64{ shaderFloat64_ } , shaderInt64{ shaderInt64_ } , shaderInt16{ shaderInt16_ } , shaderResourceResidency{ shaderResourceResidency_ } , shaderResourceMinLod{ shaderResourceMinLod_ } , sparseBinding{ sparseBinding_ } , sparseResidencyBuffer{ sparseResidencyBuffer_ } , sparseResidencyImage2D{ sparseResidencyImage2D_ } , sparseResidencyImage3D{ sparseResidencyImage3D_ } , sparseResidency2Samples{ sparseResidency2Samples_ } , sparseResidency4Samples{ sparseResidency4Samples_ } , sparseResidency8Samples{ sparseResidency8Samples_ } , sparseResidency16Samples{ sparseResidency16Samples_ } , sparseResidencyAliased{ sparseResidencyAliased_ } , 
variableMultisampleRate{ variableMultisampleRate_ } , inheritedQueries{ inheritedQueries_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setRobustBufferAccess( Bool32 robustBufferAccess_ ) & VULKAN_HPP_NOEXCEPT { robustBufferAccess = robustBufferAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setRobustBufferAccess( Bool32 robustBufferAccess_ ) && VULKAN_HPP_NOEXCEPT { robustBufferAccess = robustBufferAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ ) & VULKAN_HPP_NOEXCEPT { fullDrawIndexUint32 = fullDrawIndexUint32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setFullDrawIndexUint32( Bool32 fullDrawIndexUint32_ ) && VULKAN_HPP_NOEXCEPT { fullDrawIndexUint32 = fullDrawIndexUint32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setImageCubeArray( Bool32 imageCubeArray_ ) & VULKAN_HPP_NOEXCEPT { imageCubeArray = imageCubeArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setImageCubeArray( Bool32 imageCubeArray_ ) && VULKAN_HPP_NOEXCEPT { imageCubeArray = imageCubeArray_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setIndependentBlend( Bool32 independentBlend_ ) & VULKAN_HPP_NOEXCEPT { independentBlend = independentBlend_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setIndependentBlend( Bool32 independentBlend_ ) && VULKAN_HPP_NOEXCEPT { independentBlend = independentBlend_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setGeometryShader( Bool32 geometryShader_ ) & VULKAN_HPP_NOEXCEPT { geometryShader = geometryShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setGeometryShader( Bool32 geometryShader_ ) && VULKAN_HPP_NOEXCEPT { geometryShader = geometryShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTessellationShader( Bool32 tessellationShader_ ) & VULKAN_HPP_NOEXCEPT { tessellationShader = tessellationShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setTessellationShader( Bool32 tessellationShader_ ) && VULKAN_HPP_NOEXCEPT { tessellationShader = tessellationShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSampleRateShading( Bool32 sampleRateShading_ ) & VULKAN_HPP_NOEXCEPT { sampleRateShading = sampleRateShading_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSampleRateShading( Bool32 sampleRateShading_ ) && VULKAN_HPP_NOEXCEPT { sampleRateShading = sampleRateShading_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDualSrcBlend( Bool32 dualSrcBlend_ ) & VULKAN_HPP_NOEXCEPT { dualSrcBlend = dualSrcBlend_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setDualSrcBlend( Bool32 dualSrcBlend_ ) && VULKAN_HPP_NOEXCEPT { dualSrcBlend = dualSrcBlend_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLogicOp( Bool32 logicOp_ ) & VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setLogicOp( Bool32 logicOp_ ) && VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & 
setMultiDrawIndirect( Bool32 multiDrawIndirect_ ) & VULKAN_HPP_NOEXCEPT { multiDrawIndirect = multiDrawIndirect_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setMultiDrawIndirect( Bool32 multiDrawIndirect_ ) && VULKAN_HPP_NOEXCEPT { multiDrawIndirect = multiDrawIndirect_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ ) & VULKAN_HPP_NOEXCEPT { drawIndirectFirstInstance = drawIndirectFirstInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setDrawIndirectFirstInstance( Bool32 drawIndirectFirstInstance_ ) && VULKAN_HPP_NOEXCEPT { drawIndirectFirstInstance = drawIndirectFirstInstance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthClamp( Bool32 depthClamp_ ) & VULKAN_HPP_NOEXCEPT { depthClamp = depthClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setDepthClamp( Bool32 depthClamp_ ) && VULKAN_HPP_NOEXCEPT { depthClamp = depthClamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBiasClamp( Bool32 depthBiasClamp_ ) & VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setDepthBiasClamp( Bool32 depthBiasClamp_ ) && VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFillModeNonSolid( Bool32 fillModeNonSolid_ ) & VULKAN_HPP_NOEXCEPT { fillModeNonSolid = fillModeNonSolid_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setFillModeNonSolid( Bool32 fillModeNonSolid_ ) && VULKAN_HPP_NOEXCEPT { fillModeNonSolid = fillModeNonSolid_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setDepthBounds( Bool32 depthBounds_ ) & VULKAN_HPP_NOEXCEPT { depthBounds = depthBounds_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && 
setDepthBounds( Bool32 depthBounds_ ) && VULKAN_HPP_NOEXCEPT { depthBounds = depthBounds_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setWideLines( Bool32 wideLines_ ) & VULKAN_HPP_NOEXCEPT { wideLines = wideLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setWideLines( Bool32 wideLines_ ) && VULKAN_HPP_NOEXCEPT { wideLines = wideLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setLargePoints( Bool32 largePoints_ ) & VULKAN_HPP_NOEXCEPT { largePoints = largePoints_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setLargePoints( Bool32 largePoints_ ) && VULKAN_HPP_NOEXCEPT { largePoints = largePoints_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setAlphaToOne( Bool32 alphaToOne_ ) & VULKAN_HPP_NOEXCEPT { alphaToOne = alphaToOne_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setAlphaToOne( Bool32 alphaToOne_ ) && VULKAN_HPP_NOEXCEPT { alphaToOne = alphaToOne_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setMultiViewport( Bool32 multiViewport_ ) & VULKAN_HPP_NOEXCEPT { multiViewport = multiViewport_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setMultiViewport( Bool32 multiViewport_ ) && VULKAN_HPP_NOEXCEPT { multiViewport = multiViewport_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSamplerAnisotropy( Bool32 samplerAnisotropy_ ) & VULKAN_HPP_NOEXCEPT { samplerAnisotropy = samplerAnisotropy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSamplerAnisotropy( Bool32 samplerAnisotropy_ ) && VULKAN_HPP_NOEXCEPT { samplerAnisotropy = samplerAnisotropy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionETC2( Bool32 textureCompressionETC2_ ) & VULKAN_HPP_NOEXCEPT { textureCompressionETC2 = textureCompressionETC2_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setTextureCompressionETC2( Bool32 textureCompressionETC2_ ) && VULKAN_HPP_NOEXCEPT { textureCompressionETC2 = textureCompressionETC2_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ ) & VULKAN_HPP_NOEXCEPT { textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setTextureCompressionASTC_LDR( Bool32 textureCompressionASTC_LDR_ ) && VULKAN_HPP_NOEXCEPT { textureCompressionASTC_LDR = textureCompressionASTC_LDR_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setTextureCompressionBC( Bool32 textureCompressionBC_ ) & VULKAN_HPP_NOEXCEPT { textureCompressionBC = textureCompressionBC_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setTextureCompressionBC( Bool32 textureCompressionBC_ ) && VULKAN_HPP_NOEXCEPT { textureCompressionBC = textureCompressionBC_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ ) & VULKAN_HPP_NOEXCEPT { occlusionQueryPrecise = occlusionQueryPrecise_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setOcclusionQueryPrecise( Bool32 occlusionQueryPrecise_ ) && VULKAN_HPP_NOEXCEPT { occlusionQueryPrecise = occlusionQueryPrecise_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ ) & VULKAN_HPP_NOEXCEPT { pipelineStatisticsQuery = pipelineStatisticsQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setPipelineStatisticsQuery( Bool32 pipelineStatisticsQuery_ ) && VULKAN_HPP_NOEXCEPT { pipelineStatisticsQuery = pipelineStatisticsQuery_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( Bool32 
vertexPipelineStoresAndAtomics_ ) & VULKAN_HPP_NOEXCEPT { vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setVertexPipelineStoresAndAtomics( Bool32 vertexPipelineStoresAndAtomics_ ) && VULKAN_HPP_NOEXCEPT { vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ ) & VULKAN_HPP_NOEXCEPT { fragmentStoresAndAtomics = fragmentStoresAndAtomics_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setFragmentStoresAndAtomics( Bool32 fragmentStoresAndAtomics_ ) && VULKAN_HPP_NOEXCEPT { fragmentStoresAndAtomics = fragmentStoresAndAtomics_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ ) & VULKAN_HPP_NOEXCEPT { shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderTessellationAndGeometryPointSize( Bool32 shaderTessellationAndGeometryPointSize_ ) && VULKAN_HPP_NOEXCEPT { shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ ) & VULKAN_HPP_NOEXCEPT { shaderImageGatherExtended = shaderImageGatherExtended_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderImageGatherExtended( Bool32 shaderImageGatherExtended_ ) && VULKAN_HPP_NOEXCEPT { shaderImageGatherExtended = shaderImageGatherExtended_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageExtendedFormats = 
shaderStorageImageExtendedFormats_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageImageExtendedFormats( Bool32 shaderStorageImageExtendedFormats_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageMultisample = shaderStorageImageMultisample_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageImageMultisample( Bool32 shaderStorageImageMultisample_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageMultisample = shaderStorageImageMultisample_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageImageReadWithoutFormat( Bool32 shaderStorageImageReadWithoutFormat_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageImageWriteWithoutFormat( Bool32 shaderStorageImageWriteWithoutFormat_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { 
shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderUniformBufferArrayDynamicIndexing( Bool32 shaderUniformBufferArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderSampledImageArrayDynamicIndexing( Bool32 shaderSampledImageArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageBufferArrayDynamicIndexing( Bool32 shaderStorageBufferArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderStorageImageArrayDynamicIndexing( Bool32 shaderStorageImageArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderClipDistance( Bool32 shaderClipDistance_ ) & VULKAN_HPP_NOEXCEPT { shaderClipDistance = shaderClipDistance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderClipDistance( Bool32 shaderClipDistance_ ) && VULKAN_HPP_NOEXCEPT { shaderClipDistance = shaderClipDistance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderCullDistance( Bool32 shaderCullDistance_ ) & VULKAN_HPP_NOEXCEPT { shaderCullDistance = shaderCullDistance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderCullDistance( Bool32 shaderCullDistance_ ) && VULKAN_HPP_NOEXCEPT { shaderCullDistance = shaderCullDistance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderFloat64( Bool32 shaderFloat64_ ) & VULKAN_HPP_NOEXCEPT { shaderFloat64 = shaderFloat64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderFloat64( Bool32 shaderFloat64_ ) && VULKAN_HPP_NOEXCEPT { shaderFloat64 = shaderFloat64_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt64( Bool32 shaderInt64_ ) & VULKAN_HPP_NOEXCEPT { shaderInt64 = shaderInt64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderInt64( Bool32 shaderInt64_ ) && VULKAN_HPP_NOEXCEPT { shaderInt64 = shaderInt64_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderInt16( Bool32 shaderInt16_ ) & VULKAN_HPP_NOEXCEPT { shaderInt16 = shaderInt16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderInt16( Bool32 shaderInt16_ ) && VULKAN_HPP_NOEXCEPT { shaderInt16 = shaderInt16_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceResidency( Bool32 shaderResourceResidency_ ) & VULKAN_HPP_NOEXCEPT { shaderResourceResidency = shaderResourceResidency_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderResourceResidency( Bool32 shaderResourceResidency_ ) && VULKAN_HPP_NOEXCEPT { shaderResourceResidency = shaderResourceResidency_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setShaderResourceMinLod( Bool32 shaderResourceMinLod_ ) & VULKAN_HPP_NOEXCEPT { shaderResourceMinLod = shaderResourceMinLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setShaderResourceMinLod( Bool32 shaderResourceMinLod_ ) && VULKAN_HPP_NOEXCEPT { shaderResourceMinLod = shaderResourceMinLod_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseBinding( Bool32 sparseBinding_ ) & VULKAN_HPP_NOEXCEPT { sparseBinding = sparseBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseBinding( Bool32 sparseBinding_ ) && VULKAN_HPP_NOEXCEPT { sparseBinding = sparseBinding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ ) & VULKAN_HPP_NOEXCEPT { sparseResidencyBuffer = sparseResidencyBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidencyBuffer( Bool32 sparseResidencyBuffer_ ) && VULKAN_HPP_NOEXCEPT { sparseResidencyBuffer = sparseResidencyBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ ) & VULKAN_HPP_NOEXCEPT { sparseResidencyImage2D = sparseResidencyImage2D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidencyImage2D( Bool32 sparseResidencyImage2D_ ) && VULKAN_HPP_NOEXCEPT { sparseResidencyImage2D = sparseResidencyImage2D_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ ) & VULKAN_HPP_NOEXCEPT { sparseResidencyImage3D = sparseResidencyImage3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceFeatures && setSparseResidencyImage3D( Bool32 sparseResidencyImage3D_ ) && VULKAN_HPP_NOEXCEPT { sparseResidencyImage3D = sparseResidencyImage3D_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency2Samples( Bool32 sparseResidency2Samples_ ) & VULKAN_HPP_NOEXCEPT { sparseResidency2Samples = sparseResidency2Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidency2Samples( Bool32 sparseResidency2Samples_ ) && VULKAN_HPP_NOEXCEPT { sparseResidency2Samples = sparseResidency2Samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency4Samples( Bool32 sparseResidency4Samples_ ) & VULKAN_HPP_NOEXCEPT { sparseResidency4Samples = sparseResidency4Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidency4Samples( Bool32 sparseResidency4Samples_ ) && VULKAN_HPP_NOEXCEPT { sparseResidency4Samples = sparseResidency4Samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency8Samples( Bool32 sparseResidency8Samples_ ) & VULKAN_HPP_NOEXCEPT { sparseResidency8Samples = sparseResidency8Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidency8Samples( Bool32 sparseResidency8Samples_ ) && VULKAN_HPP_NOEXCEPT { sparseResidency8Samples = sparseResidency8Samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidency16Samples( Bool32 sparseResidency16Samples_ ) & VULKAN_HPP_NOEXCEPT { sparseResidency16Samples = sparseResidency16Samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidency16Samples( Bool32 sparseResidency16Samples_ ) && VULKAN_HPP_NOEXCEPT { sparseResidency16Samples = sparseResidency16Samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setSparseResidencyAliased( Bool32 sparseResidencyAliased_ ) & 
VULKAN_HPP_NOEXCEPT { sparseResidencyAliased = sparseResidencyAliased_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setSparseResidencyAliased( Bool32 sparseResidencyAliased_ ) && VULKAN_HPP_NOEXCEPT { sparseResidencyAliased = sparseResidencyAliased_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setVariableMultisampleRate( Bool32 variableMultisampleRate_ ) & VULKAN_HPP_NOEXCEPT { variableMultisampleRate = variableMultisampleRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setVariableMultisampleRate( Bool32 variableMultisampleRate_ ) && VULKAN_HPP_NOEXCEPT { variableMultisampleRate = variableMultisampleRate_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & setInheritedQueries( Bool32 inheritedQueries_ ) & VULKAN_HPP_NOEXCEPT { inheritedQueries = inheritedQueries_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures && setInheritedQueries( Bool32 inheritedQueries_ ) && VULKAN_HPP_NOEXCEPT { inheritedQueries = inheritedQueries_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( robustBufferAccess, fullDrawIndexUint32, imageCubeArray, independentBlend, geometryShader, tessellationShader, sampleRateShading, dualSrcBlend, logicOp, multiDrawIndirect, drawIndirectFirstInstance, depthClamp, depthBiasClamp, fillModeNonSolid, depthBounds, wideLines, largePoints, alphaToOne, multiViewport, samplerAnisotropy, textureCompressionETC2, 
textureCompressionASTC_LDR, textureCompressionBC, occlusionQueryPrecise, pipelineStatisticsQuery, vertexPipelineStoresAndAtomics, fragmentStoresAndAtomics, shaderTessellationAndGeometryPointSize, shaderImageGatherExtended, shaderStorageImageExtendedFormats, shaderStorageImageMultisample, shaderStorageImageReadWithoutFormat, shaderStorageImageWriteWithoutFormat, shaderUniformBufferArrayDynamicIndexing, shaderSampledImageArrayDynamicIndexing, shaderStorageBufferArrayDynamicIndexing, shaderStorageImageArrayDynamicIndexing, shaderClipDistance, shaderCullDistance, shaderFloat64, shaderInt64, shaderInt16, shaderResourceResidency, shaderResourceMinLod, sparseBinding, sparseResidencyBuffer, sparseResidencyImage2D, sparseResidencyImage3D, sparseResidency2Samples, sparseResidency4Samples, sparseResidency8Samples, sparseResidency16Samples, sparseResidencyAliased, variableMultisampleRate, inheritedQueries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFeatures const & ) const = default; #else bool operator==( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( robustBufferAccess == rhs.robustBufferAccess ) && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 ) && ( imageCubeArray == rhs.imageCubeArray ) && ( independentBlend == rhs.independentBlend ) && ( geometryShader == rhs.geometryShader ) && ( tessellationShader == rhs.tessellationShader ) && ( sampleRateShading == rhs.sampleRateShading ) && ( dualSrcBlend == rhs.dualSrcBlend ) && ( logicOp == rhs.logicOp ) && ( multiDrawIndirect == rhs.multiDrawIndirect ) && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance ) && ( depthClamp == rhs.depthClamp ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( fillModeNonSolid == rhs.fillModeNonSolid ) && ( depthBounds == rhs.depthBounds ) && ( wideLines == rhs.wideLines ) && ( largePoints == rhs.largePoints ) && ( alphaToOne 
== rhs.alphaToOne ) && ( multiViewport == rhs.multiViewport ) && ( samplerAnisotropy == rhs.samplerAnisotropy ) && ( textureCompressionETC2 == rhs.textureCompressionETC2 ) && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR ) && ( textureCompressionBC == rhs.textureCompressionBC ) && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise ) && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery ) && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics ) && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics ) && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize ) && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended ) && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats ) && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample ) && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat ) && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat ) && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) && ( shaderClipDistance == rhs.shaderClipDistance ) && ( shaderCullDistance == rhs.shaderCullDistance ) && ( shaderFloat64 == rhs.shaderFloat64 ) && ( shaderInt64 == rhs.shaderInt64 ) && ( shaderInt16 == rhs.shaderInt16 ) && ( shaderResourceResidency == rhs.shaderResourceResidency ) && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) && ( sparseBinding == rhs.sparseBinding ) && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) && ( 
sparseResidency2Samples == rhs.sparseResidency2Samples ) && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) && ( variableMultisampleRate == rhs.variableMultisampleRate ) && ( inheritedQueries == rhs.inheritedQueries ); # endif } bool operator!=( PhysicalDeviceFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Bool32 robustBufferAccess = {}; Bool32 fullDrawIndexUint32 = {}; Bool32 imageCubeArray = {}; Bool32 independentBlend = {}; Bool32 geometryShader = {}; Bool32 tessellationShader = {}; Bool32 sampleRateShading = {}; Bool32 dualSrcBlend = {}; Bool32 logicOp = {}; Bool32 multiDrawIndirect = {}; Bool32 drawIndirectFirstInstance = {}; Bool32 depthClamp = {}; Bool32 depthBiasClamp = {}; Bool32 fillModeNonSolid = {}; Bool32 depthBounds = {}; Bool32 wideLines = {}; Bool32 largePoints = {}; Bool32 alphaToOne = {}; Bool32 multiViewport = {}; Bool32 samplerAnisotropy = {}; Bool32 textureCompressionETC2 = {}; Bool32 textureCompressionASTC_LDR = {}; Bool32 textureCompressionBC = {}; Bool32 occlusionQueryPrecise = {}; Bool32 pipelineStatisticsQuery = {}; Bool32 vertexPipelineStoresAndAtomics = {}; Bool32 fragmentStoresAndAtomics = {}; Bool32 shaderTessellationAndGeometryPointSize = {}; Bool32 shaderImageGatherExtended = {}; Bool32 shaderStorageImageExtendedFormats = {}; Bool32 shaderStorageImageMultisample = {}; Bool32 shaderStorageImageReadWithoutFormat = {}; Bool32 shaderStorageImageWriteWithoutFormat = {}; Bool32 shaderUniformBufferArrayDynamicIndexing = {}; Bool32 shaderSampledImageArrayDynamicIndexing = {}; Bool32 shaderStorageBufferArrayDynamicIndexing = {}; Bool32 shaderStorageImageArrayDynamicIndexing = {}; Bool32 shaderClipDistance = {}; Bool32 shaderCullDistance = {}; Bool32 shaderFloat64 = {}; Bool32 shaderInt64 = {}; Bool32 
shaderInt16 = {}; Bool32 shaderResourceResidency = {}; Bool32 shaderResourceMinLod = {}; Bool32 sparseBinding = {}; Bool32 sparseResidencyBuffer = {}; Bool32 sparseResidencyImage2D = {}; Bool32 sparseResidencyImage3D = {}; Bool32 sparseResidency2Samples = {}; Bool32 sparseResidency4Samples = {}; Bool32 sparseResidency8Samples = {}; Bool32 sparseResidency16Samples = {}; Bool32 sparseResidencyAliased = {}; Bool32 variableMultisampleRate = {}; Bool32 inheritedQueries = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFeatures; }; #endif // wrapper struct for struct VkDeviceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceCreateInfo.html struct DeviceCreateInfo { using NativeType = VkDeviceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) # if defined( _MSC_VER ) # pragma warning( push ) # pragma warning( disable : 4996 ) // 'function': was declared deprecated # elif defined( __clang__ ) # pragma clang diagnostic push # pragma clang diagnostic ignored "-Wdeprecated-declarations" # elif defined( __GNUC__ ) # pragma GCC diagnostic push # pragma GCC diagnostic ignored "-Wdeprecated-declarations" # else // unknown compiler... 
just ignore the warnings for yourselves ;) # endif VULKAN_HPP_CONSTEXPR_17 DeviceCreateInfo( DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const DeviceQueueCreateInfo * pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , queueCreateInfoCount{ queueCreateInfoCount_ } , pQueueCreateInfos{ pQueueCreateInfos_ } , enabledLayerCount{ enabledLayerCount_ } , ppEnabledLayerNames{ ppEnabledLayerNames_ } , enabledExtensionCount{ enabledExtensionCount_ } , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } , pEnabledFeatures{ pEnabledFeatures_ } { } VULKAN_HPP_CONSTEXPR_17 DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceCreateInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceCreateInfo( DeviceCreateFlags flags_, ArrayProxyNoTemporaries const & queueCreateInfos_, ArrayProxyNoTemporaries const & pEnabledLayerNames_ = {}, ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, const PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , queueCreateInfoCount( static_cast( queueCreateInfos_.size() ) ) , pQueueCreateInfos( queueCreateInfos_.data() ) , enabledLayerCount( static_cast( pEnabledLayerNames_.size() ) ) , ppEnabledLayerNames( pEnabledLayerNames_.data() ) , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) , pEnabledFeatures( pEnabledFeatures_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; # if defined( _MSC_VER ) # pragma warning( pop ) # elif defined( __clang__ ) # pragma clang diagnostic pop # elif defined( __GNUC__ ) # pragma GCC diagnostic pop # else // unknown compiler... just ignore the warnings for yourselves ;) # endif #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { pNext = rhs.pNext; flags = static_cast( rhs.flags ); queueCreateInfoCount = rhs.queueCreateInfoCount; pQueueCreateInfos = reinterpret_cast( rhs.pQueueCreateInfos ); enabledExtensionCount = rhs.enabledExtensionCount; ppEnabledExtensionNames = rhs.ppEnabledExtensionNames; pEnabledFeatures = reinterpret_cast( rhs.pEnabledFeatures ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setFlags( DeviceCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setFlags( DeviceCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) & VULKAN_HPP_NOEXCEPT { queueCreateInfoCount = queueCreateInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) && VULKAN_HPP_NOEXCEPT { queueCreateInfoCount = queueCreateInfoCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPQueueCreateInfos( const DeviceQueueCreateInfo * pQueueCreateInfos_ ) & VULKAN_HPP_NOEXCEPT { pQueueCreateInfos = pQueueCreateInfos_; return *this; } VULKAN_HPP_CONSTEXPR_14 
DeviceCreateInfo && setPQueueCreateInfos( const DeviceQueueCreateInfo * pQueueCreateInfos_ ) && VULKAN_HPP_NOEXCEPT { pQueueCreateInfos = pQueueCreateInfos_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceCreateInfo & setQueueCreateInfos( ArrayProxyNoTemporaries const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT { queueCreateInfoCount = static_cast( queueCreateInfos_.size() ); pQueueCreateInfos = queueCreateInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_DEPRECATED( "ignored" ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) & VULKAN_HPP_NOEXCEPT { detail::ignore( enabledLayerCount_ ); return *this; } VULKAN_HPP_DEPRECATED( "ignored" ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setEnabledLayerCount( uint32_t enabledLayerCount_ ) && VULKAN_HPP_NOEXCEPT { detail::ignore( enabledLayerCount_ ); return std::move( *this ); } VULKAN_HPP_DEPRECATED( "ignored" ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) & VULKAN_HPP_NOEXCEPT { detail::ignore( ppEnabledLayerNames_ ); return *this; } VULKAN_HPP_DEPRECATED( "ignored" ) VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) && VULKAN_HPP_NOEXCEPT { detail::ignore( ppEnabledLayerNames_ ); return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) VULKAN_HPP_DEPRECATED( "ignored" ) DeviceCreateInfo & setPEnabledLayerNames( ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { detail::ignore( pEnabledLayerNames_ ); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) & VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setEnabledExtensionCount( uint32_t 
enabledExtensionCount_ ) && VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) & VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) && VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceCreateInfo & setPEnabledExtensionNames( ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); ppEnabledExtensionNames = pEnabledExtensionNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & setPEnabledFeatures( const PhysicalDeviceFeatures * pEnabledFeatures_ ) & VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo && setPEnabledFeatures( const PhysicalDeviceFeatures * pEnabledFeatures_ ) && VULKAN_HPP_NOEXCEPT { pEnabledFeatures = pEnabledFeatures_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, queueCreateInfoCount, pQueueCreateInfos, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames, 
pEnabledFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = queueCreateInfoCount <=> rhs.queueCreateInfoCount; cmp != 0 ) return cmp; if ( auto cmp = pQueueCreateInfos <=> rhs.pQueueCreateInfos; cmp != 0 ) return cmp; if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < enabledExtensionCount; ++i ) { if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; } if ( auto cmp = pEnabledFeatures <=> rhs.pEnabledFeatures; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueCreateInfoCount == rhs.queueCreateInfoCount ) && ( pQueueCreateInfos == rhs.pQueueCreateInfos ) && ( enabledExtensionCount == rhs.enabledExtensionCount ) && std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && ( pEnabledFeatures == rhs.pEnabledFeatures ); } bool operator!=( DeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDeviceCreateInfo; const void * pNext = {}; DeviceCreateFlags flags = {}; uint32_t queueCreateInfoCount = {}; const DeviceQueueCreateInfo * pQueueCreateInfos = {}; VULKAN_HPP_DEPRECATED( "ignored" ) 
// --- Tail of vk::DeviceCreateInfo member list, then CppType traits, then the
// VK_EXT_device_memory_report structs ---
// enabledLayerCount / ppEnabledLayerNames carry VULKAN_HPP_DEPRECATED( "ignored" ): device
// layers are ignored by the implementation, which is why the comparison operators above skip
// them. The two CppType specializations map back to DeviceCreateInfo (the first is guarded by a
// C++20 version check).
// NOTE(review): the CppType specializations' template-argument lists ("template <> struct
// CppType { ... }") were stripped by extraction, as were the reinterpret_cast/std::tuple
// arguments throughout -- restore from the generator output; this chunk does not compile as-is.
// Also note the original line breaks were lost, so the "// wrapper struct ..." doc comments and
// preprocessor directives now sit mid-line.
uint32_t enabledLayerCount = {}; VULKAN_HPP_DEPRECATED( "ignored" ) const char * const * ppEnabledLayerNames = {}; uint32_t enabledExtensionCount = {}; const char * const * ppEnabledExtensionNames = {}; const PhysicalDeviceFeatures * pEnabledFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceCreateInfo; }; #endif template <> struct CppType { using Type = DeviceCreateInfo; }; // wrapper struct for struct VkDeviceMemoryReportCallbackDataEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryReportCallbackDataEXT.html struct DeviceMemoryReportCallbackDataEXT { using NativeType = VkDeviceMemoryReportCallbackDataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportFlagsEXT flags_ = {}, DeviceMemoryReportEventTypeEXT type_ = DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, DeviceSize size_ = {}, ObjectType objectType_ = ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , type{ type_ } , memoryObjectId{ memoryObjectId_ } , size{ size_ } , objectType{ objectType_ } , objectHandle{ objectHandle_ } , heapIndex{ heapIndex_ } { } VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) { } DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
// DeviceMemoryReportCallbackDataEXT is delivered *to* the application inside the memory-report
// callback, which is why it has no setters -- only the native assignment operator, conversion
// operators, reflect(), and (when <=> is unavailable) memberwise ==/!=. Its pNext is non-const
// void * (output-style struct).
DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryReportCallbackDataEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceMemoryReportCallbackDataEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; #else bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); # endif } bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; void * pNext = {}; DeviceMemoryReportFlagsEXT flags = {}; DeviceMemoryReportEventTypeEXT type = DeviceMemoryReportEventTypeEXT::eAllocate; uint64_t memoryObjectId = {}; DeviceSize size = {}; ObjectType objectType = ObjectType::eUnknown; uint64_t objectHandle = {}; uint32_t heapIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { 
// Next: PFN_DeviceMemoryReportCallbackEXT -- the vk-namespace flavor of the callback pointer
// (takes the wrapped DeviceMemoryReportCallbackDataEXT instead of the raw Vk struct). Then
// DeviceDeviceMemoryReportCreateInfoEXT begins: note allowDuplicate = true, i.e. several
// instances of this struct may legally appear in one pNext chain (one per registered callback).
// The deprecated extra constructor accepts the raw PFN_vkDeviceMemoryReportCallbackEXT and
// reinterpret_casts it; the surrounding pragmas suppress -Wcast-function-type for that cast.
using Type = DeviceMemoryReportCallbackDataEXT; }; #endif template <> struct CppType { using Type = DeviceMemoryReportCallbackDataEXT; }; typedef void( VKAPI_PTR * PFN_DeviceMemoryReportCallbackEXT )( const DeviceMemoryReportCallbackDataEXT * pCallbackData, void * pUserData ); // wrapper struct for struct VkDeviceDeviceMemoryReportCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDeviceMemoryReportCreateInfoEXT.html struct DeviceDeviceMemoryReportCreateInfoEXT { using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceMemoryReportFlagsEXT flags_ = {}, PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pfnUserCallback{ pfnUserCallback_ } , pUserData{ pUserData_ } { } VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
// --- vk::DeviceDeviceMemoryReportCreateInfoEXT (continued) ---
// The deprecated constructor below delegates to the primary one after reinterpret_cast'ing the
// raw PFN_vkDeviceMemoryReportCallbackEXT to the vk-namespace callback type; the pragma pop
// closes the -Wcast-function-type suppression opened just above. Then the native assignment
// operator and the lvalue/rvalue setter pairs (pNext, flags, pfnUserCallback, pUserData),
// including a deprecated setPfnUserCallback overload for the raw function-pointer type, again
// wrapped in the same diagnostic pragmas.
// NOTE(review): the reinterpret_cast / std::tuple template arguments were stripped by
// extraction throughout this chunk; restore from the generator output -- it does not compile
// as shown.
) DeviceDeviceMemoryReportCreateInfoEXT( DeviceMemoryReportFlagsEXT flags_, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : DeviceDeviceMemoryReportCreateInfoEXT( flags_, reinterpret_cast( pfnUserCallback_ ), pUserData_, pNext_ ) { } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setFlags( DeviceMemoryReportFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT && setFlags( DeviceMemoryReportFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) & VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT && setPfnUserCallback( PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) && VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
// Continued setters, conversions, reflect(), and comparisons. Note: unlike the neighboring
// structs, operator== here is NOT wrapped in a VULKAN_HPP_HAS_SPACESHIP_OPERATOR guard -- this
// struct always uses the hand-written memberwise comparison (function pointers compare by
// address).
DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void * pUserData_ ) & VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT && setPUserData( void * pUserData_ ) && VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return std::move( *this ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic push # if defined( __clang__ ) # pragma clang diagnostic ignored "-Wunknown-warning-option" # endif # pragma GCC diagnostic ignored "-Wcast-function-type" # endif VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); } # if defined( __clang__ ) || defined( __GNUC__ ) # pragma GCC diagnostic pop # endif #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceDeviceMemoryReportCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceDeviceMemoryReportCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pfnUserCallback, pUserData ); } #endif bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnUserCallback == rhs.pfnUserCallback ) && ( pUserData == rhs.pUserData ); #endif } bool 
// Member list and CppType traits for DeviceDeviceMemoryReportCreateInfoEXT, then the start of
// vk::DeviceDiagnosticsConfigCreateInfoNV (VK_NV_device_diagnostics_config): a simple
// flags-only pNext-chain struct for vkCreateDevice.
operator!=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; const void * pNext = {}; DeviceMemoryReportFlagsEXT flags = {}; PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {}; void * pUserData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceDeviceMemoryReportCreateInfoEXT; }; #endif template <> struct CppType { using Type = DeviceDeviceMemoryReportCreateInfoEXT; }; // wrapper struct for struct VkDeviceDiagnosticsConfigCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceDiagnosticsConfigCreateInfoNV.html struct DeviceDiagnosticsConfigCreateInfoNV { using NativeType = VkDeviceDiagnosticsConfigCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast( &rhs ) ) { } DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
// --- vk::DeviceDiagnosticsConfigCreateInfoNV (continued) ---
// Setters for pNext and flags (lvalue/rvalue pairs), the four native conversion operators,
// reflect(), and the defaulted <=> (falling back to memberwise ==/!= pre-C++20), followed by
// the member list with in-class initializers.
// NOTE(review): as elsewhere in this chunk, the <...> template arguments of reinterpret_cast,
// std::tuple and the CppType specializations were stripped by extraction -- restore from the
// generator output; this text does not compile as shown.
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & setFlags( DeviceDiagnosticsConfigFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV && setFlags( DeviceDiagnosticsConfigFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceDiagnosticsConfigCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceDiagnosticsConfigCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceDiagnosticsConfigCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const & ) const = default; #else bool operator==( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV; const void * pNext = {}; 
// CppType traits for DeviceDiagnosticsConfigCreateInfoNV, then the whole of
// vk::DeviceEventInfoEXT (VK_EXT_display_control): a single-field struct selecting the device
// event to register for, defaulting to DeviceEventTypeEXT::eDisplayHotplug.
DeviceDiagnosticsConfigFlagsNV flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceDiagnosticsConfigCreateInfoNV; }; #endif template <> struct CppType { using Type = DeviceDiagnosticsConfigCreateInfoNV; }; // wrapper struct for struct VkDeviceEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceEventInfoEXT.html struct DeviceEventInfoEXT { using NativeType = VkDeviceEventInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventTypeEXT deviceEvent_ = DeviceEventTypeEXT::eDisplayHotplug, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceEvent{ deviceEvent_ } { } VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceEventInfoEXT( *reinterpret_cast( &rhs ) ) {} DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & setDeviceEvent( DeviceEventTypeEXT deviceEvent_ ) & VULKAN_HPP_NOEXCEPT { deviceEvent = deviceEvent_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT && 
// Tail of DeviceEventInfoEXT (rvalue setter, conversions, reflect, comparisons, members,
// CppType traits), then the start of vk::DeviceFaultAddressInfoEXT (VK_EXT_device_fault).
// DeviceFaultAddressInfoEXT has no sType/pNext -- it is a plain data record, so it carries no
// structureType/allowDuplicate members.
setDeviceEvent( DeviceEventTypeEXT deviceEvent_ ) && VULKAN_HPP_NOEXCEPT { deviceEvent = deviceEvent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceEventInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceEvent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceEventInfoEXT const & ) const = default; #else bool operator==( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceEvent == rhs.deviceEvent ); # endif } bool operator!=( DeviceEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceEventInfoEXT; const void * pNext = {}; DeviceEventTypeEXT deviceEvent = DeviceEventTypeEXT::eDisplayHotplug; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceEventInfoEXT; }; #endif template <> struct CppType { using Type = DeviceEventInfoEXT; }; // wrapper struct for struct VkDeviceFaultAddressInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultAddressInfoEXT.html struct DeviceFaultAddressInfoEXT { using NativeType = VkDeviceFaultAddressInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressTypeEXT addressType_ = DeviceFaultAddressTypeEXT::eNone, DeviceAddress 
// --- vk::DeviceFaultAddressInfoEXT (continued) ---
// Remaining constructor body and setters for addressType / reportedAddress / addressPrecision.
// NOTE(review): template arguments of reinterpret_cast, std::tuple, std::array, CppType, etc.
// were stripped from this chunk by extraction; restore from the generator output -- the text
// does not compile as shown.
reportedAddress_ = {}, DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT : addressType{ addressType_ } , reportedAddress{ reportedAddress_ } , addressPrecision{ addressPrecision_ } { } VULKAN_HPP_CONSTEXPR DeviceFaultAddressInfoEXT( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceFaultAddressInfoEXT( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultAddressInfoEXT( *reinterpret_cast( &rhs ) ) { } DeviceFaultAddressInfoEXT & operator=( DeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceFaultAddressInfoEXT & operator=( VkDeviceFaultAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressType( DeviceFaultAddressTypeEXT addressType_ ) & VULKAN_HPP_NOEXCEPT { addressType = addressType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT && setAddressType( DeviceFaultAddressTypeEXT addressType_ ) && VULKAN_HPP_NOEXCEPT { addressType = addressType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setReportedAddress( DeviceAddress reportedAddress_ ) & VULKAN_HPP_NOEXCEPT { reportedAddress = reportedAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT && setReportedAddress( DeviceAddress reportedAddress_ ) && VULKAN_HPP_NOEXCEPT { reportedAddress = reportedAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT & setAddressPrecision( DeviceSize addressPrecision_ ) & VULKAN_HPP_NOEXCEPT { addressPrecision = addressPrecision_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultAddressInfoEXT && setAddressPrecision( DeviceSize addressPrecision_ ) && VULKAN_HPP_NOEXCEPT { addressPrecision = addressPrecision_; return std::move( *this ); } #endif 
// Conversions, reflect(), comparisons and member list for DeviceFaultAddressInfoEXT. Because
// this struct has no sType, only the C++20-guarded CppType specialization is emitted (there is
// no StructureType mapping). Then vk::DeviceFaultCountsEXT begins -- an output struct filled by
// vkGetDeviceFaultInfoEXT (non-const void * pNext), holding the element counts / byte size the
// application uses to size DeviceFaultInfoEXT's arrays.
/*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceFaultAddressInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultAddressInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultAddressInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceFaultAddressInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( addressType, reportedAddress, addressPrecision ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceFaultAddressInfoEXT const & ) const = default; #else bool operator==( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( addressType == rhs.addressType ) && ( reportedAddress == rhs.reportedAddress ) && ( addressPrecision == rhs.addressPrecision ); # endif } bool operator!=( DeviceFaultAddressInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceFaultAddressTypeEXT addressType = DeviceFaultAddressTypeEXT::eNone; DeviceAddress reportedAddress = {}; DeviceSize addressPrecision = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceFaultAddressInfoEXT; }; #endif // wrapper struct for struct VkDeviceFaultCountsEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultCountsEXT.html struct DeviceFaultCountsEXT { using NativeType = VkDeviceFaultCountsEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultCountsEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( uint32_t addressInfoCount_ = {}, uint32_t vendorInfoCount_ = 
// vk::DeviceFaultCountsEXT: constructors, native assignment, and setters for pNext,
// addressInfoCount, vendorInfoCount and vendorBinarySize (lvalue/rvalue pairs).
{}, DeviceSize vendorBinarySize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , addressInfoCount{ addressInfoCount_ } , vendorInfoCount{ vendorInfoCount_ } , vendorBinarySize{ vendorBinarySize_ } { } VULKAN_HPP_CONSTEXPR DeviceFaultCountsEXT( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceFaultCountsEXT( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultCountsEXT( *reinterpret_cast( &rhs ) ) { } DeviceFaultCountsEXT & operator=( DeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceFaultCountsEXT & operator=( VkDeviceFaultCountsEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setAddressInfoCount( uint32_t addressInfoCount_ ) & VULKAN_HPP_NOEXCEPT { addressInfoCount = addressInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT && setAddressInfoCount( uint32_t addressInfoCount_ ) && VULKAN_HPP_NOEXCEPT { addressInfoCount = addressInfoCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorInfoCount( uint32_t vendorInfoCount_ ) & VULKAN_HPP_NOEXCEPT { vendorInfoCount = vendorInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT && setVendorInfoCount( uint32_t vendorInfoCount_ ) && VULKAN_HPP_NOEXCEPT { vendorInfoCount = vendorInfoCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT & setVendorBinarySize( DeviceSize vendorBinarySize_ ) & VULKAN_HPP_NOEXCEPT { vendorBinarySize = vendorBinarySize_; return 
// Tail of DeviceFaultCountsEXT (conversions, reflect, comparisons, members, CppType traits),
// then the opening of vk::DeviceFaultVendorInfoEXT (vendor-specific fault record with a fixed
// char description buffer and two uint64 payload fields).
*this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultCountsEXT && setVendorBinarySize( DeviceSize vendorBinarySize_ ) && VULKAN_HPP_NOEXCEPT { vendorBinarySize = vendorBinarySize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceFaultCountsEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultCountsEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultCountsEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceFaultCountsEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, addressInfoCount, vendorInfoCount, vendorBinarySize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceFaultCountsEXT const & ) const = default; #else bool operator==( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( addressInfoCount == rhs.addressInfoCount ) && ( vendorInfoCount == rhs.vendorInfoCount ) && ( vendorBinarySize == rhs.vendorBinarySize ); # endif } bool operator!=( DeviceFaultCountsEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceFaultCountsEXT; void * pNext = {}; uint32_t addressInfoCount = {}; uint32_t vendorInfoCount = {}; DeviceSize vendorBinarySize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceFaultCountsEXT; }; #endif template <> struct CppType { using Type = DeviceFaultCountsEXT; }; // wrapper struct for struct VkDeviceFaultVendorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorInfoEXT.html struct DeviceFaultVendorInfoEXT { using NativeType = 
// --- vk::DeviceFaultVendorInfoEXT ---
// Plain (no sType) record: a fixed-size char description buffer plus vendorFaultCode /
// vendorFaultData. The enhanced-mode std::string constructor and setter copy into the fixed
// buffer via strncpy_s (Windows) / strncpy, after asserting
// description_.size() < VK_MAX_DESCRIPTION_SIZE, which guarantees room for the terminating NUL.
// NOTE(review): the <...> template arguments (std::array<char, VK_MAX_DESCRIPTION_SIZE>,
// ArrayWrapper1D, std::min's explicit type argument, the casts, CppType) were stripped from
// this chunk by extraction; restore from the generator output -- the text does not compile as
// shown.
VkDeviceFaultVendorInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array const & description_ = {}, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT : description{ description_ } , vendorFaultCode{ vendorFaultCode_ } , vendorFaultData{ vendorFaultData_ } { } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceFaultVendorInfoEXT( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultVendorInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceFaultVendorInfoEXT( std::string const & description_, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) : vendorFaultCode( vendorFaultCode_ ), vendorFaultData( vendorFaultData_ ) { VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); # if defined( _WIN32 ) strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); # else strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); # endif } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceFaultVendorInfoEXT & operator=( DeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceFaultVendorInfoEXT & operator=( VkDeviceFaultVendorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setDescription( std::array description_ ) & VULKAN_HPP_NOEXCEPT { description = description_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT && setDescription( std::array description_ ) && VULKAN_HPP_NOEXCEPT { description = description_; return std::move( *this ); } # if 
// std::string convenience setter (same strncpy-based copy as the constructor), remaining
// scalar setters, conversion operators and reflect().
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceFaultVendorInfoEXT & setDescription( std::string const & description_ ) VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); # if defined( _WIN32 ) strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); # else strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); # endif return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultCode( uint64_t vendorFaultCode_ ) & VULKAN_HPP_NOEXCEPT { vendorFaultCode = vendorFaultCode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT && setVendorFaultCode( uint64_t vendorFaultCode_ ) && VULKAN_HPP_NOEXCEPT { vendorFaultCode = vendorFaultCode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT & setVendorFaultData( uint64_t vendorFaultData_ ) & VULKAN_HPP_NOEXCEPT { vendorFaultData = vendorFaultData_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT && setVendorFaultData( uint64_t vendorFaultData_ ) && VULKAN_HPP_NOEXCEPT { vendorFaultData = vendorFaultData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceFaultVendorInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultVendorInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultVendorInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceFaultVendorInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint64_t const &, uint64_t const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( description, vendorFaultCode, vendorFaultData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DeviceFaultVendorInfoEXT 
// Hand-written <=> / == for DeviceFaultVendorInfoEXT order by strcmp(description) first
// (description is a NUL-terminated fixed buffer, so strcmp is well-defined here), then the two
// uint64 payload fields. Afterwards vk::DeviceFaultInfoEXT begins: the top-level fault-report
// output struct whose three pointer arrays are sized from DeviceFaultCountsEXT.
const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = vendorFaultCode <=> rhs.vendorFaultCode; cmp != 0 ) return cmp; if ( auto cmp = vendorFaultData <=> rhs.vendorFaultData; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( strcmp( description, rhs.description ) == 0 ) && ( vendorFaultCode == rhs.vendorFaultCode ) && ( vendorFaultData == rhs.vendorFaultData ); } bool operator!=( DeviceFaultVendorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: ArrayWrapper1D description = {}; uint64_t vendorFaultCode = {}; uint64_t vendorFaultData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceFaultVendorInfoEXT; }; #endif // wrapper struct for struct VkDeviceFaultInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultInfoEXT.html struct DeviceFaultInfoEXT { using NativeType = VkDeviceFaultInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceFaultInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( std::array const & description_ = {}, DeviceFaultAddressInfoEXT * pAddressInfos_ = {}, DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, void * pVendorBinaryData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , description{ description_ } , pAddressInfos{ pAddressInfos_ } , pVendorInfos{ pVendorInfos_ } , pVendorBinaryData{ pVendorBinaryData_ } { } VULKAN_HPP_CONSTEXPR_14 DeviceFaultInfoEXT( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceFaultInfoEXT( VkDeviceFaultInfoEXT const & rhs ) 
// vk::DeviceFaultInfoEXT: native conversions, reflect(), and the beginning of its hand-written
// operator<=>. NOTE(review): this definition is truncated at the end of the chunk (the <=> body
// stops mid-expression); the remainder lies outside this view.
VULKAN_HPP_NOEXCEPT : DeviceFaultInfoEXT( *reinterpret_cast( &rhs ) ) {} DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceFaultInfoEXT & operator=( VkDeviceFaultInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDeviceFaultInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceFaultInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, DeviceFaultAddressInfoEXT * const &, DeviceFaultVendorInfoEXT * const &, void * const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, description, pAddressInfos, pVendorInfos, pVendorBinaryData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = pAddressInfos <=> rhs.pAddressInfos; cmp != 0 ) return cmp; if ( auto cmp = pVendorInfos <=> rhs.pVendorInfos; cmp != 0 ) return cmp; if ( auto cmp = pVendorBinaryData <=> rhs.pVendorBinaryData; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( description, rhs.description ) == 0 ) && ( pAddressInfos == rhs.pAddressInfos ) && ( pVendorInfos == rhs.pVendorInfos ) && ( pVendorBinaryData == rhs.pVendorBinaryData ); } bool operator!=( DeviceFaultInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDeviceFaultInfoEXT; void * pNext = {}; ArrayWrapper1D description = {}; DeviceFaultAddressInfoEXT * pAddressInfos = {}; DeviceFaultVendorInfoEXT * pVendorInfos = {}; void * pVendorBinaryData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceFaultInfoEXT; }; #endif template <> struct CppType { using Type = DeviceFaultInfoEXT; }; // wrapper struct for struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceFaultVendorBinaryHeaderVersionOneEXT.html struct DeviceFaultVendorBinaryHeaderVersionOneEXT { using NativeType = VkDeviceFaultVendorBinaryHeaderVersionOneEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( uint32_t headerSize_ = {}, DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ = DeviceFaultVendorBinaryHeaderVersionEXT::eOne, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, uint32_t driverVersion_ = {}, std::array const & pipelineCacheUUID_ = {}, uint32_t applicationNameOffset_ = {}, uint32_t applicationVersion_ = {}, uint32_t 
engineNameOffset_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT : headerSize{ headerSize_ } , headerVersion{ headerVersion_ } , vendorID{ vendorID_ } , deviceID{ deviceID_ } , driverVersion{ driverVersion_ } , pipelineCacheUUID{ pipelineCacheUUID_ } , applicationNameOffset{ applicationNameOffset_ } , applicationVersion{ applicationVersion_ } , engineNameOffset{ engineNameOffset_ } , engineVersion{ engineVersion_ } , apiVersion{ apiVersion_ } { } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceFaultVendorBinaryHeaderVersionOneEXT( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceFaultVendorBinaryHeaderVersionOneEXT( *reinterpret_cast( &rhs ) ) { } DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceFaultVendorBinaryHeaderVersionOneEXT & operator=( VkDeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderSize( uint32_t headerSize_ ) & VULKAN_HPP_NOEXCEPT { headerSize = headerSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setHeaderSize( uint32_t headerSize_ ) && VULKAN_HPP_NOEXCEPT { headerSize = headerSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setHeaderVersion( DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) & VULKAN_HPP_NOEXCEPT { headerVersion = headerVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setHeaderVersion( 
DeviceFaultVendorBinaryHeaderVersionEXT headerVersion_ ) && VULKAN_HPP_NOEXCEPT { headerVersion = headerVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setVendorID( uint32_t vendorID_ ) & VULKAN_HPP_NOEXCEPT { vendorID = vendorID_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setVendorID( uint32_t vendorID_ ) && VULKAN_HPP_NOEXCEPT { vendorID = vendorID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDeviceID( uint32_t deviceID_ ) & VULKAN_HPP_NOEXCEPT { deviceID = deviceID_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setDeviceID( uint32_t deviceID_ ) && VULKAN_HPP_NOEXCEPT { deviceID = deviceID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setDriverVersion( uint32_t driverVersion_ ) & VULKAN_HPP_NOEXCEPT { driverVersion = driverVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setDriverVersion( uint32_t driverVersion_ ) && VULKAN_HPP_NOEXCEPT { driverVersion = driverVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setPipelineCacheUUID( std::array pipelineCacheUUID_ ) & VULKAN_HPP_NOEXCEPT { pipelineCacheUUID = pipelineCacheUUID_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setPipelineCacheUUID( std::array pipelineCacheUUID_ ) && VULKAN_HPP_NOEXCEPT { pipelineCacheUUID = pipelineCacheUUID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationNameOffset( uint32_t applicationNameOffset_ ) & VULKAN_HPP_NOEXCEPT { applicationNameOffset = applicationNameOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setApplicationNameOffset( uint32_t applicationNameOffset_ ) && 
VULKAN_HPP_NOEXCEPT { applicationNameOffset = applicationNameOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApplicationVersion( uint32_t applicationVersion_ ) & VULKAN_HPP_NOEXCEPT { applicationVersion = applicationVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setApplicationVersion( uint32_t applicationVersion_ ) && VULKAN_HPP_NOEXCEPT { applicationVersion = applicationVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineNameOffset( uint32_t engineNameOffset_ ) & VULKAN_HPP_NOEXCEPT { engineNameOffset = engineNameOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setEngineNameOffset( uint32_t engineNameOffset_ ) && VULKAN_HPP_NOEXCEPT { engineNameOffset = engineNameOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setEngineVersion( uint32_t engineVersion_ ) & VULKAN_HPP_NOEXCEPT { engineVersion = engineVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setEngineVersion( uint32_t engineVersion_ ) && VULKAN_HPP_NOEXCEPT { engineVersion = engineVersion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT & setApiVersion( uint32_t apiVersion_ ) & VULKAN_HPP_NOEXCEPT { apiVersion = apiVersion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorBinaryHeaderVersionOneEXT && setApiVersion( uint32_t apiVersion_ ) && VULKAN_HPP_NOEXCEPT { apiVersion = apiVersion_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDeviceFaultVendorBinaryHeaderVersionOneEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceFaultVendorBinaryHeaderVersionOneEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( headerSize, headerVersion, vendorID, deviceID, driverVersion, pipelineCacheUUID, applicationNameOffset, applicationVersion, engineNameOffset, engineVersion, apiVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceFaultVendorBinaryHeaderVersionOneEXT const & ) const = default; #else bool operator==( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( headerSize == rhs.headerSize ) && ( headerVersion == rhs.headerVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( driverVersion == rhs.driverVersion ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && ( applicationNameOffset == rhs.applicationNameOffset ) && ( applicationVersion == rhs.applicationVersion ) && ( engineNameOffset == rhs.engineNameOffset ) && ( engineVersion == rhs.engineVersion ) && ( apiVersion == rhs.apiVersion ); # endif } bool operator!=( DeviceFaultVendorBinaryHeaderVersionOneEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t headerSize = {}; DeviceFaultVendorBinaryHeaderVersionEXT headerVersion = DeviceFaultVendorBinaryHeaderVersionEXT::eOne; uint32_t vendorID = {}; uint32_t deviceID = {}; uint32_t driverVersion = {}; ArrayWrapper1D pipelineCacheUUID = {}; uint32_t applicationNameOffset = {}; uint32_t applicationVersion = {}; uint32_t engineNameOffset = {}; uint32_t engineVersion = {}; uint32_t apiVersion = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceFaultVendorBinaryHeaderVersionOneEXT; }; #endif // wrapper struct for struct VkDeviceGroupBindSparseInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupBindSparseInfo.html struct DeviceGroupBindSparseInfo { using NativeType = VkDeviceGroupBindSparseInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , resourceDeviceIndex{ resourceDeviceIndex_ } , memoryDeviceIndex{ memoryDeviceIndex_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupBindSparseInfo( *reinterpret_cast( &rhs ) ) { } DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) & VULKAN_HPP_NOEXCEPT { resourceDeviceIndex = 
resourceDeviceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo && setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) && VULKAN_HPP_NOEXCEPT { resourceDeviceIndex = resourceDeviceIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) & VULKAN_HPP_NOEXCEPT { memoryDeviceIndex = memoryDeviceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo && setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) && VULKAN_HPP_NOEXCEPT { memoryDeviceIndex = memoryDeviceIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupBindSparseInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupBindSparseInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupBindSparseInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, resourceDeviceIndex, memoryDeviceIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupBindSparseInfo const & ) const = default; #else bool operator==( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) && ( memoryDeviceIndex == rhs.memoryDeviceIndex ); # endif } bool operator!=( DeviceGroupBindSparseInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupBindSparseInfo; const void * pNext = {}; uint32_t resourceDeviceIndex = {}; uint32_t memoryDeviceIndex = 
{}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupBindSparseInfo; }; #endif template <> struct CppType { using Type = DeviceGroupBindSparseInfo; }; using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; // wrapper struct for struct VkDeviceGroupCommandBufferBeginInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupCommandBufferBeginInfo.html struct DeviceGroupCommandBufferBeginInfo { using NativeType = VkDeviceGroupCommandBufferBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceMask{ deviceMask_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast( &rhs ) ) { } DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) & VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo && setDeviceMask( uint32_t deviceMask_ ) && VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupCommandBufferBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupCommandBufferBeginInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupCommandBufferBeginInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupCommandBufferBeginInfo const & ) const = default; #else bool operator==( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMask == rhs.deviceMask ); # endif } bool operator!=( DeviceGroupCommandBufferBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; const void * pNext = {}; uint32_t deviceMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupCommandBufferBeginInfo; }; #endif template <> struct CppType { using Type = DeviceGroupCommandBufferBeginInfo; }; using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; // wrapper struct for struct VkDeviceGroupDeviceCreateInfo, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupDeviceCreateInfo.html struct DeviceGroupDeviceCreateInfo { using NativeType = VkDeviceGroupDeviceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, const PhysicalDevice * pPhysicalDevices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , physicalDeviceCount{ physicalDeviceCount_ } , pPhysicalDevices{ pPhysicalDevices_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupDeviceCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupDeviceCreateInfo( ArrayProxyNoTemporaries const & physicalDevices_, const void * pNext_ = nullptr ) : pNext( pNext_ ), physicalDeviceCount( static_cast( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) & VULKAN_HPP_NOEXCEPT { physicalDeviceCount = physicalDeviceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo && setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) && VULKAN_HPP_NOEXCEPT { physicalDeviceCount = physicalDeviceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const PhysicalDevice * pPhysicalDevices_ ) & VULKAN_HPP_NOEXCEPT { pPhysicalDevices = pPhysicalDevices_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo && setPPhysicalDevices( const PhysicalDevice * pPhysicalDevices_ ) && VULKAN_HPP_NOEXCEPT { pPhysicalDevices = pPhysicalDevices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupDeviceCreateInfo & setPhysicalDevices( ArrayProxyNoTemporaries const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT { physicalDeviceCount = static_cast( physicalDevices_.size() ); pPhysicalDevices = physicalDevices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupDeviceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupDeviceCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupDeviceCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, physicalDeviceCount, pPhysicalDevices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupDeviceCreateInfo const & ) const = default; #else bool operator==( DeviceGroupDeviceCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && ( pPhysicalDevices == rhs.pPhysicalDevices ); # endif } bool operator!=( DeviceGroupDeviceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; const void * pNext = {}; uint32_t physicalDeviceCount = {}; const PhysicalDevice * pPhysicalDevices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupDeviceCreateInfo; }; #endif template <> struct CppType { using Type = DeviceGroupDeviceCreateInfo; }; using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; // wrapper struct for struct VkDeviceGroupPresentCapabilitiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentCapabilitiesKHR.html struct DeviceGroupPresentCapabilitiesKHR { using NativeType = VkDeviceGroupPresentCapabilitiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const & presentMask_ = {}, DeviceGroupPresentModeFlagsKHR modes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentMask{ presentMask_ } , modes{ modes_ } { } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } DeviceGroupPresentCapabilitiesKHR & operator=( 
DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDeviceGroupPresentCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupPresentCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupPresentCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, DeviceGroupPresentModeFlagsKHR const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentMask, modes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupPresentCapabilitiesKHR const & ) const = default; #else bool operator==( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMask == rhs.presentMask ) && ( modes == rhs.modes ); # endif } bool operator!=( DeviceGroupPresentCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR; void * pNext = {}; ArrayWrapper1D presentMask = {}; DeviceGroupPresentModeFlagsKHR modes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupPresentCapabilitiesKHR; }; #endif template <> struct CppType { using Type = DeviceGroupPresentCapabilitiesKHR; }; // wrapper struct for struct VkDeviceGroupPresentInfoKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupPresentInfoKHR.html struct DeviceGroupPresentInfoKHR { using NativeType = VkDeviceGroupPresentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {}, const uint32_t * pDeviceMasks_ = {}, DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , swapchainCount{ swapchainCount_ } , pDeviceMasks{ pDeviceMasks_ } , mode{ mode_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupPresentInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupPresentInfoKHR( ArrayProxyNoTemporaries const & deviceMasks_, DeviceGroupPresentModeFlagBitsKHR mode_ = DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr ) : pNext( pNext_ ), swapchainCount( static_cast( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) & VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR && setSwapchainCount( uint32_t swapchainCount_ ) && VULKAN_HPP_NOEXCEPT { swapchainCount = swapchainCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t * pDeviceMasks_ ) & VULKAN_HPP_NOEXCEPT { pDeviceMasks = pDeviceMasks_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR && setPDeviceMasks( const uint32_t * pDeviceMasks_ ) && VULKAN_HPP_NOEXCEPT { pDeviceMasks = pDeviceMasks_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupPresentInfoKHR & setDeviceMasks( ArrayProxyNoTemporaries const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT { swapchainCount = static_cast( deviceMasks_.size() ); pDeviceMasks = deviceMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & setMode( DeviceGroupPresentModeFlagBitsKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR && setMode( DeviceGroupPresentModeFlagBitsKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupPresentInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, swapchainCount, pDeviceMasks, mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupPresentInfoKHR const & ) const = default; #else bool operator==( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchainCount == rhs.swapchainCount ) && ( pDeviceMasks == rhs.pDeviceMasks ) && ( mode == rhs.mode ); # endif } bool operator!=( DeviceGroupPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupPresentInfoKHR; const void * pNext = {}; uint32_t swapchainCount = {}; const uint32_t * pDeviceMasks = {}; DeviceGroupPresentModeFlagBitsKHR mode = DeviceGroupPresentModeFlagBitsKHR::eLocal; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupPresentInfoKHR; }; #endif template <> struct CppType { using Type = DeviceGroupPresentInfoKHR; }; // wrapper struct for struct VkDeviceGroupRenderPassBeginInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupRenderPassBeginInfo.html struct DeviceGroupRenderPassBeginInfo { using NativeType = VkDeviceGroupRenderPassBeginInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const Rect2D * pDeviceRenderAreas_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceMask{ deviceMask_ } , deviceRenderAreaCount{ deviceRenderAreaCount_ } , 
pDeviceRenderAreas{ pDeviceRenderAreas_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupRenderPassBeginInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, ArrayProxyNoTemporaries const & deviceRenderAreas_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , deviceMask( deviceMask_ ) , deviceRenderAreaCount( static_cast( deviceRenderAreas_.size() ) ) , pDeviceRenderAreas( deviceRenderAreas_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) & VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo && setDeviceMask( uint32_t deviceMask_ ) && VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) & VULKAN_HPP_NOEXCEPT { deviceRenderAreaCount = deviceRenderAreaCount_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo && setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) && VULKAN_HPP_NOEXCEPT { deviceRenderAreaCount = deviceRenderAreaCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const Rect2D * pDeviceRenderAreas_ ) & VULKAN_HPP_NOEXCEPT { pDeviceRenderAreas = pDeviceRenderAreas_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo && setPDeviceRenderAreas( const Rect2D * pDeviceRenderAreas_ ) && VULKAN_HPP_NOEXCEPT { pDeviceRenderAreas = pDeviceRenderAreas_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( ArrayProxyNoTemporaries const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT { deviceRenderAreaCount = static_cast( deviceRenderAreas_.size() ); pDeviceRenderAreas = deviceRenderAreas_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupRenderPassBeginInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupRenderPassBeginInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupRenderPassBeginInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceMask, deviceRenderAreaCount, pDeviceRenderAreas ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupRenderPassBeginInfo const & ) const = default; #else bool operator==( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( deviceMask == rhs.deviceMask ) && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); # endif } bool operator!=( DeviceGroupRenderPassBeginInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; const void * pNext = {}; uint32_t deviceMask = {}; uint32_t deviceRenderAreaCount = {}; const Rect2D * pDeviceRenderAreas = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupRenderPassBeginInfo; }; #endif template <> struct CppType { using Type = DeviceGroupRenderPassBeginInfo; }; using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; // wrapper struct for struct VkDeviceGroupSubmitInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSubmitInfo.html struct DeviceGroupSubmitInfo { using NativeType = VkDeviceGroupSubmitInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {}, const uint32_t * pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t * pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , waitSemaphoreCount{ waitSemaphoreCount_ } , pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ } , commandBufferCount{ commandBufferCount_ } , pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ } , signalSemaphoreCount{ signalSemaphoreCount_ } , pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( 
DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupSubmitInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupSubmitInfo( ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_, ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ = {}, ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , waitSemaphoreCount( static_cast( waitSemaphoreDeviceIndices_.size() ) ) , pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ) , commandBufferCount( static_cast( commandBufferDeviceMasks_.size() ) ) , pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ) , signalSemaphoreCount( static_cast( signalSemaphoreDeviceIndices_.size() ) ) , pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) & VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = waitSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) && VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = 
waitSemaphoreCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) & VULKAN_HPP_NOEXCEPT { pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setPWaitSemaphoreDeviceIndices( const uint32_t * pWaitSemaphoreDeviceIndices_ ) && VULKAN_HPP_NOEXCEPT { pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( ArrayProxyNoTemporaries const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { waitSemaphoreCount = static_cast( waitSemaphoreDeviceIndices_.size() ); pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) & VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setCommandBufferCount( uint32_t commandBufferCount_ ) && VULKAN_HPP_NOEXCEPT { commandBufferCount = commandBufferCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) & VULKAN_HPP_NOEXCEPT { pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setPCommandBufferDeviceMasks( const uint32_t * pCommandBufferDeviceMasks_ ) && VULKAN_HPP_NOEXCEPT { pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( ArrayProxyNoTemporaries const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT { commandBufferCount = static_cast( commandBufferDeviceMasks_.size() 
); pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) & VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) && VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = signalSemaphoreCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) & VULKAN_HPP_NOEXCEPT { pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo && setPSignalSemaphoreDeviceIndices( const uint32_t * pSignalSemaphoreDeviceIndices_ ) && VULKAN_HPP_NOEXCEPT { pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( ArrayProxyNoTemporaries const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT { signalSemaphoreCount = static_cast( signalSemaphoreDeviceIndices_.size() ); pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupSubmitInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupSubmitInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupSubmitInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, waitSemaphoreCount, 
pWaitSemaphoreDeviceIndices, commandBufferCount, pCommandBufferDeviceMasks, signalSemaphoreCount, pSignalSemaphoreDeviceIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupSubmitInfo const & ) const = default; #else bool operator==( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices ) && ( commandBufferCount == rhs.commandBufferCount ) && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks ) && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices ); # endif } bool operator!=( DeviceGroupSubmitInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupSubmitInfo; const void * pNext = {}; uint32_t waitSemaphoreCount = {}; const uint32_t * pWaitSemaphoreDeviceIndices = {}; uint32_t commandBufferCount = {}; const uint32_t * pCommandBufferDeviceMasks = {}; uint32_t signalSemaphoreCount = {}; const uint32_t * pSignalSemaphoreDeviceIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupSubmitInfo; }; #endif template <> struct CppType { using Type = DeviceGroupSubmitInfo; }; using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; // wrapper struct for struct VkDeviceGroupSwapchainCreateInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html struct DeviceGroupSwapchainCreateInfoKHR { using NativeType = VkDeviceGroupSwapchainCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eDeviceGroupSwapchainCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupPresentModeFlagsKHR modes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , modes{ modes_ } { } VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & setModes( DeviceGroupPresentModeFlagsKHR modes_ ) & VULKAN_HPP_NOEXCEPT { modes = modes_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR && setModes( DeviceGroupPresentModeFlagsKHR modes_ ) && VULKAN_HPP_NOEXCEPT { modes = modes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceGroupSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDeviceGroupSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceGroupSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, modes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const & ) const = default; #else bool operator==( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( modes == rhs.modes ); # endif } bool operator!=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR; const void * pNext = {}; DeviceGroupPresentModeFlagsKHR modes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceGroupSwapchainCreateInfoKHR; }; #endif template <> struct CppType { using Type = DeviceGroupSwapchainCreateInfoKHR; }; // wrapper struct for struct VkImageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCreateInfo.html struct ImageCreateInfo { using NativeType = VkImageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateFlags flags_ = {}, ImageType imageType_ = ImageType::e1D, Format format_ = Format::eUndefined, Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, SampleCountFlagBits samples_ = SampleCountFlagBits::e1, ImageTiling tiling_ = ImageTiling::eOptimal, 
ImageUsageFlags usage_ = {}, SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, ImageLayout initialLayout_ = ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , imageType{ imageType_ } , format{ format_ } , extent{ extent_ } , mipLevels{ mipLevels_ } , arrayLayers{ arrayLayers_ } , samples{ samples_ } , tiling{ tiling_ } , usage{ usage_ } , sharingMode{ sharingMode_ } , queueFamilyIndexCount{ queueFamilyIndexCount_ } , pQueueFamilyIndices{ pQueueFamilyIndices_ } , initialLayout{ initialLayout_ } { } VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCreateInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageCreateInfo( ImageCreateFlags flags_, ImageType imageType_, Format format_, Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, SampleCountFlagBits samples_, ImageTiling tiling_, ImageUsageFlags usage_, SharingMode sharingMode_, ArrayProxyNoTemporaries const & queueFamilyIndices_, ImageLayout initialLayout_ = ImageLayout::eUndefined, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , imageType( imageType_ ) , format( format_ ) , extent( extent_ ) , mipLevels( mipLevels_ ) , arrayLayers( arrayLayers_ ) , samples( samples_ ) , tiling( tiling_ ) , usage( usage_ ) , sharingMode( sharingMode_ ) , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) , pQueueFamilyIndices( queueFamilyIndices_.data() ) , initialLayout( initialLayout_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); 
return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFlags( ImageCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setFlags( ImageCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setImageType( ImageType imageType_ ) & VULKAN_HPP_NOEXCEPT { imageType = imageType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setImageType( ImageType imageType_ ) && VULKAN_HPP_NOEXCEPT { imageType = imageType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) & VULKAN_HPP_NOEXCEPT { mipLevels = mipLevels_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setMipLevels( uint32_t mipLevels_ ) && VULKAN_HPP_NOEXCEPT { mipLevels = mipLevels_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) & VULKAN_HPP_NOEXCEPT { arrayLayers = arrayLayers_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ImageCreateInfo && setArrayLayers( uint32_t arrayLayers_ ) && VULKAN_HPP_NOEXCEPT { arrayLayers = arrayLayers_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSamples( SampleCountFlagBits samples_ ) & VULKAN_HPP_NOEXCEPT { samples = samples_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setSamples( SampleCountFlagBits samples_ ) && VULKAN_HPP_NOEXCEPT { samples = samples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setTiling( ImageTiling tiling_ ) & VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setTiling( ImageTiling tiling_ ) && VULKAN_HPP_NOEXCEPT { tiling = tiling_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setUsage( ImageUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setUsage( ImageUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setSharingMode( SharingMode sharingMode_ ) & VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setSharingMode( SharingMode sharingMode_ ) && VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) & VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setPQueueFamilyIndices( const uint32_t * 
pQueueFamilyIndices_ ) && VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageCreateInfo & setQueueFamilyIndices( ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & setInitialLayout( ImageLayout initialLayout_ ) & VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo && setInitialLayout( ImageLayout initialLayout_ ) && VULKAN_HPP_NOEXCEPT { initialLayout = initialLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, imageType, format, extent, mipLevels, arrayLayers, samples, tiling, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices, initialLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCreateInfo const & ) const = default; #else bool operator==( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( imageType == rhs.imageType ) && ( format == rhs.format ) && ( extent == rhs.extent ) && ( mipLevels == rhs.mipLevels ) && ( arrayLayers 
== rhs.arrayLayers ) && ( samples == rhs.samples ) && ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ) && ( initialLayout == rhs.initialLayout ); # endif } bool operator!=( ImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageCreateInfo; const void * pNext = {}; ImageCreateFlags flags = {}; ImageType imageType = ImageType::e1D; Format format = Format::eUndefined; Extent3D extent = {}; uint32_t mipLevels = {}; uint32_t arrayLayers = {}; SampleCountFlagBits samples = SampleCountFlagBits::e1; ImageTiling tiling = ImageTiling::eOptimal; ImageUsageFlags usage = {}; SharingMode sharingMode = SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t * pQueueFamilyIndices = {}; ImageLayout initialLayout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCreateInfo; }; #endif template <> struct CppType { using Type = ImageCreateInfo; }; // wrapper struct for struct VkDeviceImageMemoryRequirements, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageMemoryRequirements.html struct DeviceImageMemoryRequirements { using NativeType = VkDeviceImageMemoryRequirements; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageMemoryRequirements; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceImageMemoryRequirements( const ImageCreateInfo * pCreateInfo_ = {}, ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pCreateInfo{ pCreateInfo_ } , planeAspect{ planeAspect_ } { } VULKAN_HPP_CONSTEXPR 
DeviceImageMemoryRequirements( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceImageMemoryRequirements( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceImageMemoryRequirements( *reinterpret_cast( &rhs ) ) { } DeviceImageMemoryRequirements & operator=( DeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceImageMemoryRequirements & operator=( VkDeviceImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) & VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements && setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) && VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) & VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageMemoryRequirements && setPlaneAspect( ImageAspectFlagBits planeAspect_ ) && VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceImageMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDeviceImageMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceImageMemoryRequirements *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pCreateInfo, planeAspect ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceImageMemoryRequirements const & ) const = default; #else bool operator==( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( planeAspect == rhs.planeAspect ); # endif } bool operator!=( DeviceImageMemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceImageMemoryRequirements; const void * pNext = {}; const ImageCreateInfo * pCreateInfo = {}; ImageAspectFlagBits planeAspect = ImageAspectFlagBits::eColor; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceImageMemoryRequirements; }; #endif template <> struct CppType { using Type = DeviceImageMemoryRequirements; }; using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; // wrapper struct for struct VkImageSubresource2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSubresource2.html struct ImageSubresource2 { using NativeType = VkImageSubresource2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , 
imageSubresource{ imageSubresource_ } { } VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource2( *reinterpret_cast( &rhs ) ) {} ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSubresource2 & operator=( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( ImageSubresource const & imageSubresource_ ) & VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 && setImageSubresource( ImageSubresource const & imageSubresource_ ) && VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSubresource2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSubresource2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageSubresource ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageSubresource2 const & ) const = default; #else bool operator==( ImageSubresource2 
const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSubresource == rhs.imageSubresource ); # endif } bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageSubresource2; void * pNext = {}; ImageSubresource imageSubresource = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSubresource2; }; #endif template <> struct CppType { using Type = ImageSubresource2; }; using ImageSubresource2EXT = ImageSubresource2; using ImageSubresource2KHR = ImageSubresource2; // wrapper struct for struct VkDeviceImageSubresourceInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceImageSubresourceInfo.html struct DeviceImageSubresourceInfo { using NativeType = VkDeviceImageSubresourceInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( const ImageCreateInfo * pCreateInfo_ = {}, const ImageSubresource2 * pSubresource_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pCreateInfo{ pCreateInfo_ } , pSubresource{ pSubresource_ } { } VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceImageSubresourceInfo( *reinterpret_cast( &rhs ) ) { } DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo 
const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) & VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo && setPCreateInfo( const ImageCreateInfo * pCreateInfo_ ) && VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const ImageSubresource2 * pSubresource_ ) & VULKAN_HPP_NOEXCEPT { pSubresource = pSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo && setPSubresource( const ImageSubresource2 * pSubresource_ ) && VULKAN_HPP_NOEXCEPT { pSubresource = pSubresource_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceImageSubresourceInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceImageSubresourceInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pCreateInfo, pSubresource ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceImageSubresourceInfo const & ) const = default; #else bool operator==( DeviceImageSubresourceInfo const 
& rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ) && ( pSubresource == rhs.pSubresource ); # endif } bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceImageSubresourceInfo; const void * pNext = {}; const ImageCreateInfo * pCreateInfo = {}; const ImageSubresource2 * pSubresource = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceImageSubresourceInfo; }; #endif template <> struct CppType { using Type = DeviceImageSubresourceInfo; }; using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; // wrapper struct for struct VkDeviceMemoryOpaqueCaptureAddressInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOpaqueCaptureAddressInfo.html struct DeviceMemoryOpaqueCaptureAddressInfo { using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } { } VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast( &rhs ) ) { } DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceMemoryOpaqueCaptureAddressInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryOpaqueCaptureAddressInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceMemoryOpaqueCaptureAddressInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const & ) const = default; #else bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); # endif } bool 
operator!=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo; const void * pNext = {}; DeviceMemory memory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceMemoryOpaqueCaptureAddressInfo; }; #endif template <> struct CppType { using Type = DeviceMemoryOpaqueCaptureAddressInfo; }; using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo; // wrapper struct for struct VkDeviceMemoryOverallocationCreateInfoAMD, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html struct DeviceMemoryOverallocationCreateInfoAMD { using NativeType = VkDeviceMemoryOverallocationCreateInfoAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( MemoryOverallocationBehaviorAMD overallocationBehavior_ = MemoryOverallocationBehaviorAMD::eDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , overallocationBehavior{ overallocationBehavior_ } { } VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast( &rhs ) ) { } DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceMemoryOverallocationCreateInfoAMD & operator=( 
VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( MemoryOverallocationBehaviorAMD overallocationBehavior_ ) & VULKAN_HPP_NOEXCEPT { overallocationBehavior = overallocationBehavior_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD && setOverallocationBehavior( MemoryOverallocationBehaviorAMD overallocationBehavior_ ) && VULKAN_HPP_NOEXCEPT { overallocationBehavior = overallocationBehavior_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceMemoryOverallocationCreateInfoAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceMemoryOverallocationCreateInfoAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceMemoryOverallocationCreateInfoAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, overallocationBehavior ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const & ) const = default; #else bool operator==( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( overallocationBehavior == rhs.overallocationBehavior ); # endif } bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD; const void * pNext = {}; MemoryOverallocationBehaviorAMD overallocationBehavior = MemoryOverallocationBehaviorAMD::eDefault; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceMemoryOverallocationCreateInfoAMD; }; #endif template <> struct CppType { using Type = DeviceMemoryOverallocationCreateInfoAMD; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) union DeviceOrHostAddressConstAMDX { using NativeType = VkDeviceOrHostAddressConstAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( DeviceAddress deviceAddress_ = {} ) : deviceAddress( deviceAddress_ ) {} VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX( const void * hostAddress_ ) : hostAddress( hostAddress_ ) {} # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX & setHostAddress( const void * hostAddress_ ) & VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceOrHostAddressConstAMDX && setHostAddress( const void * hostAddress_ ) && VULKAN_HPP_NOEXCEPT { hostAddress = hostAddress_; return std::move( 
*this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceOrHostAddressConstAMDX const &() const { return *reinterpret_cast( this ); } operator VkDeviceOrHostAddressConstAMDX &() { return *reinterpret_cast( this ); } # ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS DeviceAddress deviceAddress; const void * hostAddress; # else VkDeviceAddress deviceAddress; const void * hostAddress; # endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceOrHostAddressConstAMDX; }; # endif #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkDevicePipelineBinaryInternalCacheControlKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePipelineBinaryInternalCacheControlKHR.html struct DevicePipelineBinaryInternalCacheControlKHR { using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( Bool32 disableInternalCache_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , disableInternalCache{ disableInternalCache_ } { } VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DevicePipelineBinaryInternalCacheControlKHR( *reinterpret_cast( &rhs ) ) { } DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DevicePipelineBinaryInternalCacheControlKHR & operator=( 
VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setDisableInternalCache( Bool32 disableInternalCache_ ) & VULKAN_HPP_NOEXCEPT { disableInternalCache = disableInternalCache_; return *this; } VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR && setDisableInternalCache( Bool32 disableInternalCache_ ) && VULKAN_HPP_NOEXCEPT { disableInternalCache = disableInternalCache_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDevicePipelineBinaryInternalCacheControlKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDevicePipelineBinaryInternalCacheControlKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, disableInternalCache ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default; #else bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return 
( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disableInternalCache == rhs.disableInternalCache ); # endif } bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; const void * pNext = {}; Bool32 disableInternalCache = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DevicePipelineBinaryInternalCacheControlKHR; }; #endif template <> struct CppType { using Type = DevicePipelineBinaryInternalCacheControlKHR; }; // wrapper struct for struct VkDevicePrivateDataCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDevicePrivateDataCreateInfo.html struct DevicePrivateDataCreateInfo { using NativeType = VkDevicePrivateDataCreateInfo; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , privateDataSlotRequestCount{ privateDataSlotRequestCount_ } { } VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DevicePrivateDataCreateInfo( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DevicePrivateDataCreateInfo( *reinterpret_cast( &rhs ) ) { } DevicePrivateDataCreateInfo & operator=( DevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DevicePrivateDataCreateInfo & operator=( VkDevicePrivateDataCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) & VULKAN_HPP_NOEXCEPT { privateDataSlotRequestCount = privateDataSlotRequestCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfo && setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) && VULKAN_HPP_NOEXCEPT { privateDataSlotRequestCount = privateDataSlotRequestCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDevicePrivateDataCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDevicePrivateDataCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDevicePrivateDataCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDevicePrivateDataCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, privateDataSlotRequestCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DevicePrivateDataCreateInfo const & ) const = default; #else bool operator==( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount ); # endif } bool operator!=( DevicePrivateDataCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType 
sType = StructureType::eDevicePrivateDataCreateInfo; const void * pNext = {}; uint32_t privateDataSlotRequestCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DevicePrivateDataCreateInfo; }; #endif template <> struct CppType { using Type = DevicePrivateDataCreateInfo; }; using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; // wrapper struct for struct VkDeviceQueueGlobalPriorityCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueGlobalPriorityCreateInfo.html struct DeviceQueueGlobalPriorityCreateInfo { using NativeType = VkDeviceQueueGlobalPriorityCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( QueueGlobalPriority globalPriority_ = QueueGlobalPriority::eLow, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , globalPriority{ globalPriority_ } { } VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast( &rhs ) ) { } DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setGlobalPriority( QueueGlobalPriority globalPriority_ ) & VULKAN_HPP_NOEXCEPT { globalPriority = globalPriority_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo && setGlobalPriority( QueueGlobalPriority globalPriority_ ) && VULKAN_HPP_NOEXCEPT { globalPriority = globalPriority_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueGlobalPriorityCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceQueueGlobalPriorityCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, globalPriority ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default; #else bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriority == rhs.globalPriority ); # endif } bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; const void * pNext = {}; QueueGlobalPriority globalPriority = QueueGlobalPriority::eLow; }; #if 20 
<= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceQueueGlobalPriorityCreateInfo; }; #endif template <> struct CppType { using Type = DeviceQueueGlobalPriorityCreateInfo; }; using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; // wrapper struct for struct VkDeviceQueueInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueInfo2.html struct DeviceQueueInfo2 { using NativeType = VkDeviceQueueInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , queueFamilyIndex{ queueFamilyIndex_ } , queueIndex{ queueIndex_ } { } VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueInfo2( *reinterpret_cast( &rhs ) ) {} DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setFlags( 
DeviceQueueCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 && setFlags( DeviceQueueCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 && setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) & VULKAN_HPP_NOEXCEPT { queueIndex = queueIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 && setQueueIndex( uint32_t queueIndex_ ) && VULKAN_HPP_NOEXCEPT { queueIndex = queueIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceQueueInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceQueueInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, queueFamilyIndex, queueIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceQueueInfo2 const & ) const = default; #else bool operator==( DeviceQueueInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( queueIndex == rhs.queueIndex ); # endif } bool operator!=( DeviceQueueInfo2 const & rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceQueueInfo2; const void * pNext = {}; DeviceQueueCreateFlags flags = {}; uint32_t queueFamilyIndex = {}; uint32_t queueIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceQueueInfo2; }; #endif template <> struct CppType { using Type = DeviceQueueInfo2; }; // wrapper struct for struct VkDeviceQueueShaderCoreControlCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceQueueShaderCoreControlCreateInfoARM.html struct DeviceQueueShaderCoreControlCreateInfoARM { using NativeType = VkDeviceQueueShaderCoreControlCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderCoreCount{ shaderCoreCount_ } { } VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceQueueShaderCoreControlCreateInfoARM( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceQueueShaderCoreControlCreateInfoARM( *reinterpret_cast( &rhs ) ) { } DeviceQueueShaderCoreControlCreateInfoARM & operator=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceQueueShaderCoreControlCreateInfoARM & operator=( VkDeviceQueueShaderCoreControlCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM & setShaderCoreCount( uint32_t shaderCoreCount_ ) & VULKAN_HPP_NOEXCEPT { shaderCoreCount = shaderCoreCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceQueueShaderCoreControlCreateInfoARM && setShaderCoreCount( uint32_t shaderCoreCount_ ) && VULKAN_HPP_NOEXCEPT { shaderCoreCount = shaderCoreCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceQueueShaderCoreControlCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueShaderCoreControlCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceQueueShaderCoreControlCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceQueueShaderCoreControlCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderCoreCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceQueueShaderCoreControlCreateInfoARM const & ) const = default; #else bool operator==( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCoreCount == rhs.shaderCoreCount ); # endif } bool operator!=( DeviceQueueShaderCoreControlCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eDeviceQueueShaderCoreControlCreateInfoARM; void * pNext = {}; uint32_t shaderCoreCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceQueueShaderCoreControlCreateInfoARM; }; #endif template <> struct CppType { using Type = DeviceQueueShaderCoreControlCreateInfoARM; }; // wrapper struct for struct VkTensorDescriptionARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorDescriptionARM.html struct TensorDescriptionARM { using NativeType = VkTensorDescriptionARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorDescriptionARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR TensorDescriptionARM( TensorTilingARM tiling_ = TensorTilingARM::eOptimal, Format format_ = Format::eUndefined, uint32_t dimensionCount_ = {}, const int64_t * pDimensions_ = {}, const int64_t * pStrides_ = {}, TensorUsageFlagsARM usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tiling{ tiling_ } , format{ format_ } , dimensionCount{ dimensionCount_ } , pDimensions{ pDimensions_ } , pStrides{ pStrides_ } , usage{ usage_ } { } VULKAN_HPP_CONSTEXPR TensorDescriptionARM( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; TensorDescriptionARM( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT : TensorDescriptionARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorDescriptionARM( TensorTilingARM tiling_, Format format_, ArrayProxyNoTemporaries const & dimensions_, ArrayProxyNoTemporaries const & strides_ = {}, TensorUsageFlagsARM usage_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , tiling( tiling_ ) , format( format_ ) , dimensionCount( static_cast( dimensions_.size() ) ) , pDimensions( dimensions_.data() ) , pStrides( strides_.data() ) , usage( usage_ ) { # ifdef 
VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( strides_.empty() || ( dimensions_.size() == strides_.size() ) ); # else if ( !strides_.empty() && ( dimensions_.size() != strides_.size() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::TensorDescriptionARM::TensorDescriptionARM: !strides_.empty() && ( dimensions_.size() != strides_.size() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ TensorDescriptionARM & operator=( TensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ TensorDescriptionARM & operator=( VkTensorDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setTiling( TensorTilingARM tiling_ ) & VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setTiling( TensorTilingARM tiling_ ) && VULKAN_HPP_NOEXCEPT { tiling = tiling_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setDimensionCount( uint32_t dimensionCount_ ) & VULKAN_HPP_NOEXCEPT { dimensionCount = dimensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setDimensionCount( uint32_t dimensionCount_ ) && VULKAN_HPP_NOEXCEPT { dimensionCount = dimensionCount_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPDimensions( const int64_t * pDimensions_ ) & VULKAN_HPP_NOEXCEPT { pDimensions = pDimensions_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setPDimensions( const int64_t * pDimensions_ ) && VULKAN_HPP_NOEXCEPT { pDimensions = pDimensions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorDescriptionARM & setDimensions( ArrayProxyNoTemporaries const & dimensions_ ) VULKAN_HPP_NOEXCEPT { dimensionCount = static_cast( dimensions_.size() ); pDimensions = dimensions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setPStrides( const int64_t * pStrides_ ) & VULKAN_HPP_NOEXCEPT { pStrides = pStrides_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setPStrides( const int64_t * pStrides_ ) && VULKAN_HPP_NOEXCEPT { pStrides = pStrides_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorDescriptionARM & setStrides( ArrayProxyNoTemporaries const & strides_ ) VULKAN_HPP_NOEXCEPT { dimensionCount = static_cast( strides_.size() ); pStrides = strides_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM & setUsage( TensorUsageFlagsARM usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorDescriptionARM && setUsage( TensorUsageFlagsARM usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkTensorDescriptionARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTensorDescriptionARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTensorDescriptionARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkTensorDescriptionARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } 
#if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tiling, format, dimensionCount, pDimensions, pStrides, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( TensorDescriptionARM const & ) const = default; #else bool operator==( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tiling == rhs.tiling ) && ( format == rhs.format ) && ( dimensionCount == rhs.dimensionCount ) && ( pDimensions == rhs.pDimensions ) && ( pStrides == rhs.pStrides ) && ( usage == rhs.usage ); # endif } bool operator!=( TensorDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eTensorDescriptionARM; const void * pNext = {}; TensorTilingARM tiling = TensorTilingARM::eOptimal; Format format = Format::eUndefined; uint32_t dimensionCount = {}; const int64_t * pDimensions = {}; const int64_t * pStrides = {}; TensorUsageFlagsARM usage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = TensorDescriptionARM; }; #endif template <> struct CppType { using Type = TensorDescriptionARM; }; // wrapper struct for struct VkTensorCreateInfoARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkTensorCreateInfoARM.html struct TensorCreateInfoARM { using NativeType = VkTensorCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTensorCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR TensorCreateInfoARM( TensorCreateFlagsARM flags_ = {}, const TensorDescriptionARM * pDescription_ = {}, SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t 
queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pDescription{ pDescription_ } , sharingMode{ sharingMode_ } , queueFamilyIndexCount{ queueFamilyIndexCount_ } , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } VULKAN_HPP_CONSTEXPR TensorCreateInfoARM( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; TensorCreateInfoARM( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : TensorCreateInfoARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCreateInfoARM( TensorCreateFlagsARM flags_, const TensorDescriptionARM * pDescription_, SharingMode sharingMode_, ArrayProxyNoTemporaries const & queueFamilyIndices_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , pDescription( pDescription_ ) , sharingMode( sharingMode_ ) , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) , pQueueFamilyIndices( queueFamilyIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ TensorCreateInfoARM & operator=( TensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ TensorCreateInfoARM & operator=( VkTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setFlags( TensorCreateFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setFlags( TensorCreateFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPDescription( const TensorDescriptionARM * pDescription_ ) & VULKAN_HPP_NOEXCEPT { pDescription = pDescription_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setPDescription( const TensorDescriptionARM * pDescription_ ) && VULKAN_HPP_NOEXCEPT { pDescription = pDescription_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setSharingMode( SharingMode sharingMode_ ) & VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setSharingMode( SharingMode sharingMode_ ) && VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) & VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 TensorCreateInfoARM && setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) && VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) TensorCreateInfoARM & setQueueFamilyIndices( ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pDescription, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( TensorCreateInfoARM const & ) const = default; #else bool operator==( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pDescription == rhs.pDescription ) && ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); # endif } bool operator!=( TensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eTensorCreateInfoARM; const void * pNext = {}; TensorCreateFlagsARM flags = {}; const TensorDescriptionARM * pDescription = {}; SharingMode sharingMode = SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t * pQueueFamilyIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = TensorCreateInfoARM; }; #endif template <> struct CppType { using Type = TensorCreateInfoARM; }; // wrapper struct for struct VkDeviceTensorMemoryRequirementsARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDeviceTensorMemoryRequirementsARM.html struct DeviceTensorMemoryRequirementsARM { using NativeType = VkDeviceTensorMemoryRequirementsARM; static const bool allowDuplicate = 
false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceTensorMemoryRequirementsARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM( const TensorCreateInfoARM * pCreateInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pCreateInfo{ pCreateInfo_ } { } VULKAN_HPP_CONSTEXPR DeviceTensorMemoryRequirementsARM( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DeviceTensorMemoryRequirementsARM( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT : DeviceTensorMemoryRequirementsARM( *reinterpret_cast( &rhs ) ) { } DeviceTensorMemoryRequirementsARM & operator=( DeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DeviceTensorMemoryRequirementsARM & operator=( VkDeviceTensorMemoryRequirementsARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM & setPCreateInfo( const TensorCreateInfoARM * pCreateInfo_ ) & VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 DeviceTensorMemoryRequirementsARM && setPCreateInfo( const TensorCreateInfoARM * pCreateInfo_ ) && VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDeviceTensorMemoryRequirementsARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); 
} operator VkDeviceTensorMemoryRequirementsARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDeviceTensorMemoryRequirementsARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDeviceTensorMemoryRequirementsARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pCreateInfo ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceTensorMemoryRequirementsARM const & ) const = default; #else bool operator==( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pCreateInfo == rhs.pCreateInfo ); # endif } bool operator!=( DeviceTensorMemoryRequirementsARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDeviceTensorMemoryRequirementsARM; const void * pNext = {}; const TensorCreateInfoARM * pCreateInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DeviceTensorMemoryRequirementsARM; }; #endif template <> struct CppType { using Type = DeviceTensorMemoryRequirementsARM; }; typedef PFN_vkVoidFunction( VKAPI_PTR * PFN_GetInstanceProcAddrLUNARG )( Instance instance, const char * pName ); // wrapper struct for struct VkDirectDriverLoadingInfoLUNARG, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingInfoLUNARG.html struct DirectDriverLoadingInfoLUNARG { using NativeType = VkDirectDriverLoadingInfoLUNARG; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingInfoLUNARG; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingFlagsLUNARG flags_ = {}, PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ } { } VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; DirectDriverLoadingInfoLUNARG( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT : DirectDriverLoadingInfoLUNARG( *reinterpret_cast( &rhs ) ) { } DirectDriverLoadingInfoLUNARG & operator=( DirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DirectDriverLoadingInfoLUNARG & operator=( VkDirectDriverLoadingInfoLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setFlags( DirectDriverLoadingFlagsLUNARG flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG && setFlags( DirectDriverLoadingFlagsLUNARG flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & setPfnGetInstanceProcAddr( PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) & VULKAN_HPP_NOEXCEPT { pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG && setPfnGetInstanceProcAddr( PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) && 
VULKAN_HPP_NOEXCEPT { pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDirectDriverLoadingInfoLUNARG const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectDriverLoadingInfoLUNARG &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectDriverLoadingInfoLUNARG const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDirectDriverLoadingInfoLUNARG *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pfnGetInstanceProcAddr ); } #endif bool operator==( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pfnGetInstanceProcAddr == rhs.pfnGetInstanceProcAddr ); #endif } bool operator!=( DirectDriverLoadingInfoLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG; void * pNext = {}; DirectDriverLoadingFlagsLUNARG flags = {}; PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DirectDriverLoadingInfoLUNARG; }; #endif template <> struct CppType { using Type = DirectDriverLoadingInfoLUNARG; }; // wrapper struct for struct VkDirectDriverLoadingListLUNARG, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectDriverLoadingListLUNARG.html struct DirectDriverLoadingListLUNARG { using NativeType = VkDirectDriverLoadingListLUNARG; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectDriverLoadingListLUNARG; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingModeLUNARG mode_ = DirectDriverLoadingModeLUNARG::eExclusive, uint32_t driverCount_ = {}, const DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mode{ mode_ } , driverCount{ driverCount_ } , pDrivers{ pDrivers_ } { } VULKAN_HPP_CONSTEXPR DirectDriverLoadingListLUNARG( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; DirectDriverLoadingListLUNARG( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT : DirectDriverLoadingListLUNARG( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DirectDriverLoadingListLUNARG( DirectDriverLoadingModeLUNARG mode_, ArrayProxyNoTemporaries const & drivers_, const void * pNext_ = nullptr ) : pNext( pNext_ ), mode( mode_ ), driverCount( static_cast( drivers_.size() ) ), pDrivers( drivers_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ DirectDriverLoadingListLUNARG & operator=( DirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DirectDriverLoadingListLUNARG & operator=( VkDirectDriverLoadingListLUNARG const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setMode( DirectDriverLoadingModeLUNARG mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG && 
setMode( DirectDriverLoadingModeLUNARG mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setDriverCount( uint32_t driverCount_ ) & VULKAN_HPP_NOEXCEPT { driverCount = driverCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG && setDriverCount( uint32_t driverCount_ ) && VULKAN_HPP_NOEXCEPT { driverCount = driverCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG & setPDrivers( const DirectDriverLoadingInfoLUNARG * pDrivers_ ) & VULKAN_HPP_NOEXCEPT { pDrivers = pDrivers_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingListLUNARG && setPDrivers( const DirectDriverLoadingInfoLUNARG * pDrivers_ ) && VULKAN_HPP_NOEXCEPT { pDrivers = pDrivers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) DirectDriverLoadingListLUNARG & setDrivers( ArrayProxyNoTemporaries const & drivers_ ) VULKAN_HPP_NOEXCEPT { driverCount = static_cast( drivers_.size() ); pDrivers = drivers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDirectDriverLoadingListLUNARG const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectDriverLoadingListLUNARG &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectDriverLoadingListLUNARG const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDirectDriverLoadingListLUNARG *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mode, driverCount, pDrivers ); } #endif bool operator==( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { #if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); #else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == 
rhs.mode ) && ( driverCount == rhs.driverCount ) && ( pDrivers == rhs.pDrivers ); #endif } bool operator!=( DirectDriverLoadingListLUNARG const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eDirectDriverLoadingListLUNARG; const void * pNext = {}; DirectDriverLoadingModeLUNARG mode = DirectDriverLoadingModeLUNARG::eExclusive; uint32_t driverCount = {}; const DirectDriverLoadingInfoLUNARG * pDrivers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DirectDriverLoadingListLUNARG; }; #endif template <> struct CppType { using Type = DirectDriverLoadingListLUNARG; }; #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) // wrapper struct for struct VkDirectFBSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDirectFBSurfaceCreateInfoEXT.html struct DirectFBSurfaceCreateInfoEXT { using NativeType = VkDirectFBSurfaceCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB * dfb_ = {}, IDirectFBSurface * surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , dfb{ dfb_ } , surface{ surface_ } { } VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setFlags( DirectFBSurfaceCreateFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT && setFlags( DirectFBSurfaceCreateFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB * dfb_ ) & VULKAN_HPP_NOEXCEPT { dfb = dfb_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT && setDfb( IDirectFB * dfb_ ) && VULKAN_HPP_NOEXCEPT { dfb = dfb_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface * surface_ ) & VULKAN_HPP_NOEXCEPT { surface = surface_; return *this; } VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT && setSurface( IDirectFBSurface * surface_ ) && VULKAN_HPP_NOEXCEPT { surface = surface_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDirectFBSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDirectFBSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDirectFBSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, 
pNext, flags, dfb, surface ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DirectFBSurfaceCreateInfoEXT const & ) const = default; # else bool operator==( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dfb == rhs.dfb ) && ( surface == rhs.surface ); # endif } bool operator!=( DirectFBSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT; const void * pNext = {}; DirectFBSurfaceCreateFlagsEXT flags = {}; IDirectFB * dfb = {}; IDirectFBSurface * surface = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DirectFBSurfaceCreateInfoEXT; }; # endif template <> struct CppType { using Type = DirectFBSurfaceCreateInfoEXT; }; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkDispatchGraphCountInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphCountInfoAMDX.html struct DispatchGraphCountInfoAMDX { using NativeType = VkDispatchGraphCountInfoAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, DeviceOrHostAddressConstAMDX infos_ = {}, uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT : count{ count_ } , infos{ infos_ } , stride{ stride_ } { } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( DispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; DispatchGraphCountInfoAMDX( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : DispatchGraphCountInfoAMDX( *reinterpret_cast( &rhs ) ) { } DispatchGraphCountInfoAMDX & operator=( DispatchGraphCountInfoAMDX const & rhs 
) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DispatchGraphCountInfoAMDX & operator=( VkDispatchGraphCountInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setCount( uint32_t count_ ) & VULKAN_HPP_NOEXCEPT { count = count_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX && setCount( uint32_t count_ ) && VULKAN_HPP_NOEXCEPT { count = count_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setInfos( DeviceOrHostAddressConstAMDX const & infos_ ) & VULKAN_HPP_NOEXCEPT { infos = infos_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX && setInfos( DeviceOrHostAddressConstAMDX const & infos_ ) && VULKAN_HPP_NOEXCEPT { infos = infos_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX & setStride( uint64_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX && setStride( uint64_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDispatchGraphCountInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchGraphCountInfoAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchGraphCountInfoAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDispatchGraphCountInfoAMDX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( count, infos, stride ); } # endif public: uint32_t count = {}; DeviceOrHostAddressConstAMDX infos = {}; uint64_t stride = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { 
using Type = DispatchGraphCountInfoAMDX; }; # endif #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkDispatchGraphInfoAMDX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchGraphInfoAMDX.html struct DispatchGraphInfoAMDX { using NativeType = VkDispatchGraphInfoAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( uint32_t nodeIndex_ = {}, uint32_t payloadCount_ = {}, DeviceOrHostAddressConstAMDX payloads_ = {}, uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT : nodeIndex{ nodeIndex_ } , payloadCount{ payloadCount_ } , payloads{ payloads_ } , payloadStride{ payloadStride_ } { } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; DispatchGraphInfoAMDX( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : DispatchGraphInfoAMDX( *reinterpret_cast( &rhs ) ) { } DispatchGraphInfoAMDX & operator=( DispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DispatchGraphInfoAMDX & operator=( VkDispatchGraphInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setNodeIndex( uint32_t nodeIndex_ ) & VULKAN_HPP_NOEXCEPT { nodeIndex = nodeIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX && setNodeIndex( uint32_t nodeIndex_ ) && VULKAN_HPP_NOEXCEPT { nodeIndex = nodeIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadCount( uint32_t payloadCount_ ) & VULKAN_HPP_NOEXCEPT { payloadCount = payloadCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX && setPayloadCount( uint32_t payloadCount_ ) && VULKAN_HPP_NOEXCEPT { payloadCount = payloadCount_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloads( DeviceOrHostAddressConstAMDX const & payloads_ ) & VULKAN_HPP_NOEXCEPT { payloads = payloads_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX && setPayloads( DeviceOrHostAddressConstAMDX const & payloads_ ) && VULKAN_HPP_NOEXCEPT { payloads = payloads_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX & setPayloadStride( uint64_t payloadStride_ ) & VULKAN_HPP_NOEXCEPT { payloadStride = payloadStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchGraphInfoAMDX && setPayloadStride( uint64_t payloadStride_ ) && VULKAN_HPP_NOEXCEPT { payloadStride = payloadStride_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDispatchGraphInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchGraphInfoAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchGraphInfoAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDispatchGraphInfoAMDX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( nodeIndex, payloadCount, payloads, payloadStride ); } # endif public: uint32_t nodeIndex = {}; uint32_t payloadCount = {}; DeviceOrHostAddressConstAMDX payloads = {}; uint64_t payloadStride = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DispatchGraphInfoAMDX; }; # endif #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkDispatchIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchIndirectCommand.html struct DispatchIndirectCommand { using NativeType = VkDispatchIndirectCommand; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = 
{}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } , z{ z_ } { } VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DispatchIndirectCommand( *reinterpret_cast( &rhs ) ) { } DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setX( uint32_t x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand && setX( uint32_t x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setY( uint32_t y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand && setY( uint32_t y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & setZ( uint32_t z_ ) & VULKAN_HPP_NOEXCEPT { z = z_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand && setZ( uint32_t z_ ) && VULKAN_HPP_NOEXCEPT { z = z_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDispatchIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchIndirectCommand const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDispatchIndirectCommand *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const 
VULKAN_HPP_NOEXCEPT { return std::tie( x, y, z ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DispatchIndirectCommand const & ) const = default; #else bool operator==( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ) && ( z == rhs.z ); # endif } bool operator!=( DispatchIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t x = {}; uint32_t y = {}; uint32_t z = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DispatchIndirectCommand; }; #endif // wrapper struct for struct VkDispatchTileInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDispatchTileInfoQCOM.html struct DispatchTileInfoQCOM { using NativeType = VkDispatchTileInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDispatchTileInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR DispatchTileInfoQCOM( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; DispatchTileInfoQCOM( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : DispatchTileInfoQCOM( *reinterpret_cast( &rhs ) ) { } DispatchTileInfoQCOM & operator=( DispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DispatchTileInfoQCOM & operator=( VkDispatchTileInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DispatchTileInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DispatchTileInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDispatchTileInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchTileInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDispatchTileInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDispatchTileInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DispatchTileInfoQCOM const & ) const = default; #else bool operator==( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( DispatchTileInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDispatchTileInfoQCOM; const void * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DispatchTileInfoQCOM; }; #endif template <> struct CppType { using Type = DispatchTileInfoQCOM; }; // wrapper struct for struct VkDisplayEventInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayEventInfoEXT.html struct DisplayEventInfoEXT { using NativeType = VkDisplayEventInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventTypeEXT 
displayEvent_ = DisplayEventTypeEXT::eFirstPixelOut, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displayEvent{ displayEvent_ } { } VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayEventInfoEXT( *reinterpret_cast( &rhs ) ) { } DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & setDisplayEvent( DisplayEventTypeEXT displayEvent_ ) & VULKAN_HPP_NOEXCEPT { displayEvent = displayEvent_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT && setDisplayEvent( DisplayEventTypeEXT displayEvent_ ) && VULKAN_HPP_NOEXCEPT { displayEvent = displayEvent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayEventInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displayEvent ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayEventInfoEXT const & ) const = default; #else bool operator==( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayEvent == rhs.displayEvent ); # endif } bool operator!=( DisplayEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayEventInfoEXT; const void * pNext = {}; DisplayEventTypeEXT displayEvent = DisplayEventTypeEXT::eFirstPixelOut; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayEventInfoEXT; }; #endif template <> struct CppType { using Type = DisplayEventInfoEXT; }; // wrapper struct for struct VkDisplayModeParametersKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeParametersKHR.html struct DisplayModeParametersKHR { using NativeType = VkDisplayModeParametersKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT : visibleRegion{ visibleRegion_ } , refreshRate{ refreshRate_ } { } VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeParametersKHR( *reinterpret_cast( &rhs ) ) { } DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setVisibleRegion( Extent2D const & visibleRegion_ ) & VULKAN_HPP_NOEXCEPT { visibleRegion = visibleRegion_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR && setVisibleRegion( Extent2D const & visibleRegion_ ) && VULKAN_HPP_NOEXCEPT { visibleRegion = visibleRegion_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) & VULKAN_HPP_NOEXCEPT { refreshRate = refreshRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR && setRefreshRate( uint32_t refreshRate_ ) && VULKAN_HPP_NOEXCEPT { refreshRate = refreshRate_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayModeParametersKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeParametersKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayModeParametersKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( visibleRegion, refreshRate ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayModeParametersKHR const & ) const = default; #else bool operator==( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( visibleRegion == rhs.visibleRegion ) && ( refreshRate == rhs.refreshRate ); # endif } bool operator!=( DisplayModeParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Extent2D visibleRegion = {}; uint32_t refreshRate = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayModeParametersKHR; }; #endif // wrapper struct for 
struct VkDisplayModeCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeCreateInfoKHR.html struct DisplayModeCreateInfoKHR { using NativeType = VkDisplayModeCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateFlagsKHR flags_ = {}, DisplayModeParametersKHR parameters_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , parameters{ parameters_ } { } VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & setFlags( DisplayModeCreateFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR && setFlags( DisplayModeCreateFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
DisplayModeCreateInfoKHR & setParameters( DisplayModeParametersKHR const & parameters_ ) & VULKAN_HPP_NOEXCEPT { parameters = parameters_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR && setParameters( DisplayModeParametersKHR const & parameters_ ) && VULKAN_HPP_NOEXCEPT { parameters = parameters_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayModeCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayModeCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, parameters ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayModeCreateInfoKHR const & ) const = default; #else bool operator==( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( parameters == rhs.parameters ); # endif } bool operator!=( DisplayModeCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayModeCreateInfoKHR; const void * pNext = {}; DisplayModeCreateFlagsKHR flags = {}; DisplayModeParametersKHR parameters = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayModeCreateInfoKHR; }; #endif template <> struct CppType { using Type = DisplayModeCreateInfoKHR; }; // wrapper struct for struct VkDisplayModePropertiesKHR, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModePropertiesKHR.html struct DisplayModePropertiesKHR { using NativeType = VkDisplayModePropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModeKHR displayMode_ = {}, DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT : displayMode{ displayMode_ } , parameters{ parameters_ } { } VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModePropertiesKHR( *reinterpret_cast( &rhs ) ) { } DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayModePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayModePropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( displayMode, parameters ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayModePropertiesKHR const & ) const = default; #else bool operator==( DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( displayMode == rhs.displayMode ) && ( parameters == rhs.parameters ); # endif } bool operator!=( 
DisplayModePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DisplayModeKHR displayMode = {}; DisplayModeParametersKHR parameters = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayModePropertiesKHR; }; #endif // wrapper struct for struct VkDisplayModeProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeProperties2KHR.html struct DisplayModeProperties2KHR { using NativeType = VkDisplayModeProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModePropertiesKHR displayModeProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displayModeProperties{ displayModeProperties_ } { } VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeProperties2KHR( *reinterpret_cast( &rhs ) ) { } DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayModeProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeProperties2KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayModeProperties2KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if 
defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displayModeProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayModeProperties2KHR const & ) const = default; #else bool operator==( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayModeProperties == rhs.displayModeProperties ); # endif } bool operator!=( DisplayModeProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayModeProperties2KHR; void * pNext = {}; DisplayModePropertiesKHR displayModeProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayModeProperties2KHR; }; #endif template <> struct CppType { using Type = DisplayModeProperties2KHR; }; // wrapper struct for struct VkDisplayModeStereoPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayModeStereoPropertiesNV.html struct DisplayModeStereoPropertiesNV { using NativeType = VkDisplayModeStereoPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeStereoPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( Bool32 hdmi3DSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hdmi3DSupported{ hdmi3DSupported_ } { } VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayModeStereoPropertiesNV( *reinterpret_cast( &rhs ) ) 
{ } DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayModeStereoPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayModeStereoPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, hdmi3DSupported ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default; #else bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdmi3DSupported == rhs.hdmi3DSupported ); # endif } bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayModeStereoPropertiesNV; void * pNext = {}; Bool32 hdmi3DSupported = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayModeStereoPropertiesNV; }; #endif template <> struct CppType { using Type = DisplayModeStereoPropertiesNV; }; // wrapper struct for struct VkDisplayNativeHdrSurfaceCapabilitiesAMD, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayNativeHdrSurfaceCapabilitiesAMD.html struct DisplayNativeHdrSurfaceCapabilitiesAMD { 
using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( Bool32 localDimmingSupport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , localDimmingSupport{ localDimmingSupport_ } { } VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast( &rhs ) ) { } DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayNativeHdrSurfaceCapabilitiesAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, localDimmingSupport ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const & ) const = default; #else bool operator==( 
DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( localDimmingSupport == rhs.localDimmingSupport ); # endif } bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD; void * pNext = {}; Bool32 localDimmingSupport = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; }; #endif template <> struct CppType { using Type = DisplayNativeHdrSurfaceCapabilitiesAMD; }; // wrapper struct for struct VkDisplayPlaneCapabilitiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilitiesKHR.html struct DisplayPlaneCapabilitiesKHR { using NativeType = VkDisplayPlaneCapabilitiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, Offset2D minSrcPosition_ = {}, Offset2D maxSrcPosition_ = {}, Extent2D minSrcExtent_ = {}, Extent2D maxSrcExtent_ = {}, Offset2D minDstPosition_ = {}, Offset2D maxDstPosition_ = {}, Extent2D minDstExtent_ = {}, Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT : supportedAlpha{ supportedAlpha_ } , minSrcPosition{ minSrcPosition_ } , maxSrcPosition{ maxSrcPosition_ } , minSrcExtent{ minSrcExtent_ } , maxSrcExtent{ maxSrcExtent_ } , minDstPosition{ minDstPosition_ } , maxDstPosition{ maxDstPosition_ } , minDstExtent{ minDstExtent_ } , maxDstExtent{ maxDstExtent_ } { } VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : 
DisplayPlaneCapabilitiesKHR( *reinterpret_cast( &rhs ) ) { } DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayPlaneCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilitiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPlaneCapabilitiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( supportedAlpha, minSrcPosition, maxSrcPosition, minSrcExtent, maxSrcExtent, minDstPosition, maxDstPosition, minDstExtent, maxDstExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPlaneCapabilitiesKHR const & ) const = default; #else bool operator==( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( supportedAlpha == rhs.supportedAlpha ) && ( minSrcPosition == rhs.minSrcPosition ) && ( maxSrcPosition == rhs.maxSrcPosition ) && ( minSrcExtent == rhs.minSrcExtent ) && ( maxSrcExtent == rhs.maxSrcExtent ) && ( minDstPosition == rhs.minDstPosition ) && ( maxDstPosition == rhs.maxDstPosition ) && ( minDstExtent == rhs.minDstExtent ) && ( maxDstExtent == rhs.maxDstExtent ); # endif } bool operator!=( DisplayPlaneCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DisplayPlaneAlphaFlagsKHR supportedAlpha = {}; Offset2D minSrcPosition = {}; Offset2D maxSrcPosition = {}; Extent2D 
minSrcExtent = {}; Extent2D maxSrcExtent = {}; Offset2D minDstPosition = {}; Offset2D maxDstPosition = {}; Extent2D minDstExtent = {}; Extent2D maxDstExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPlaneCapabilitiesKHR; }; #endif // wrapper struct for struct VkDisplayPlaneCapabilities2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneCapabilities2KHR.html struct DisplayPlaneCapabilities2KHR { using NativeType = VkDisplayPlaneCapabilities2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilitiesKHR capabilities_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , capabilities{ capabilities_ } { } VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPlaneCapabilities2KHR( *reinterpret_cast( &rhs ) ) { } DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayPlaneCapabilities2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneCapabilities2KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPlaneCapabilities2KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); 
} #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, capabilities ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPlaneCapabilities2KHR const & ) const = default; #else bool operator==( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( capabilities == rhs.capabilities ); # endif } bool operator!=( DisplayPlaneCapabilities2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR; void * pNext = {}; DisplayPlaneCapabilitiesKHR capabilities = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPlaneCapabilities2KHR; }; #endif template <> struct CppType { using Type = DisplayPlaneCapabilities2KHR; }; // wrapper struct for struct VkDisplayPlaneInfo2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneInfo2KHR.html struct DisplayPlaneInfo2KHR { using NativeType = VkDisplayPlaneInfo2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mode{ mode_ } , planeIndex{ planeIndex_ } { } VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPlaneInfo2KHR( *reinterpret_cast( &rhs ) ) { } DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setMode( DisplayModeKHR mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR && setMode( DisplayModeKHR mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) & VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR && setPlaneIndex( uint32_t planeIndex_ ) && VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayPlaneInfo2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneInfo2KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPlaneInfo2KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mode, planeIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPlaneInfo2KHR const & ) const = default; #else bool operator==( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) 
return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( planeIndex == rhs.planeIndex ); # endif } bool operator!=( DisplayPlaneInfo2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayPlaneInfo2KHR; const void * pNext = {}; DisplayModeKHR mode = {}; uint32_t planeIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPlaneInfo2KHR; }; #endif template <> struct CppType { using Type = DisplayPlaneInfo2KHR; }; // wrapper struct for struct VkDisplayPlanePropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlanePropertiesKHR.html struct DisplayPlanePropertiesKHR { using NativeType = VkDisplayPlanePropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT : currentDisplay{ currentDisplay_ } , currentStackIndex{ currentStackIndex_ } { } VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPlanePropertiesKHR( *reinterpret_cast( &rhs ) ) { } DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayPlanePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlanePropertiesKHR const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPlanePropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( currentDisplay, currentStackIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPlanePropertiesKHR const & ) const = default; #else bool operator==( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( currentDisplay == rhs.currentDisplay ) && ( currentStackIndex == rhs.currentStackIndex ); # endif } bool operator!=( DisplayPlanePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DisplayKHR currentDisplay = {}; uint32_t currentStackIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPlanePropertiesKHR; }; #endif // wrapper struct for struct VkDisplayPlaneProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPlaneProperties2KHR.html struct DisplayPlaneProperties2KHR { using NativeType = VkDisplayPlaneProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displayPlaneProperties{ displayPlaneProperties_ } { } VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPlaneProperties2KHR( *reinterpret_cast( &rhs ) 
) { } DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayPlaneProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPlaneProperties2KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPlaneProperties2KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displayPlaneProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPlaneProperties2KHR const & ) const = default; #else bool operator==( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPlaneProperties == rhs.displayPlaneProperties ); # endif } bool operator!=( DisplayPlaneProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayPlaneProperties2KHR; void * pNext = {}; DisplayPlanePropertiesKHR displayPlaneProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPlaneProperties2KHR; }; #endif template <> struct CppType { using Type = DisplayPlaneProperties2KHR; }; // wrapper struct for struct VkDisplayPowerInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPowerInfoEXT.html struct DisplayPowerInfoEXT { using NativeType = VkDisplayPowerInfoEXT; static const 
bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerStateEXT powerState_ = DisplayPowerStateEXT::eOff, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , powerState{ powerState_ } { } VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPowerInfoEXT( *reinterpret_cast( &rhs ) ) { } DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & setPowerState( DisplayPowerStateEXT powerState_ ) & VULKAN_HPP_NOEXCEPT { powerState = powerState_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT && setPowerState( DisplayPowerStateEXT powerState_ ) && VULKAN_HPP_NOEXCEPT { powerState = powerState_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayPowerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPowerInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkDisplayPowerInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, powerState ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPowerInfoEXT const & ) const = default; #else bool operator==( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( powerState == rhs.powerState ); # endif } bool operator!=( DisplayPowerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayPowerInfoEXT; const void * pNext = {}; DisplayPowerStateEXT powerState = DisplayPowerStateEXT::eOff; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPowerInfoEXT; }; #endif template <> struct CppType { using Type = DisplayPowerInfoEXT; }; // wrapper struct for struct VkDisplayPresentInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPresentInfoKHR.html struct DisplayPresentInfoKHR { using NativeType = VkDisplayPresentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( Rect2D srcRect_ = {}, Rect2D dstRect_ = {}, Bool32 persistent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcRect{ srcRect_ } , dstRect{ dstRect_ } , persistent{ persistent_ } { } VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPresentInfoKHR( 
*reinterpret_cast( &rhs ) ) { } DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setSrcRect( Rect2D const & srcRect_ ) & VULKAN_HPP_NOEXCEPT { srcRect = srcRect_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR && setSrcRect( Rect2D const & srcRect_ ) && VULKAN_HPP_NOEXCEPT { srcRect = srcRect_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setDstRect( Rect2D const & dstRect_ ) & VULKAN_HPP_NOEXCEPT { dstRect = dstRect_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR && setDstRect( Rect2D const & dstRect_ ) && VULKAN_HPP_NOEXCEPT { dstRect = dstRect_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & setPersistent( Bool32 persistent_ ) & VULKAN_HPP_NOEXCEPT { persistent = persistent_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR && setPersistent( Bool32 persistent_ ) && VULKAN_HPP_NOEXCEPT { persistent = persistent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplayPresentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPresentInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPresentInfoKHR *() 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcRect, dstRect, persistent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayPresentInfoKHR const & ) const = default; #else bool operator==( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcRect == rhs.srcRect ) && ( dstRect == rhs.dstRect ) && ( persistent == rhs.persistent ); # endif } bool operator!=( DisplayPresentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayPresentInfoKHR; const void * pNext = {}; Rect2D srcRect = {}; Rect2D dstRect = {}; Bool32 persistent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPresentInfoKHR; }; #endif template <> struct CppType { using Type = DisplayPresentInfoKHR; }; // wrapper struct for struct VkDisplayPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayPropertiesKHR.html struct DisplayPropertiesKHR { using NativeType = VkDisplayPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayKHR display_ = {}, const char * displayName_ = {}, Extent2D physicalDimensions_ = {}, Extent2D physicalResolution_ = {}, SurfaceTransformFlagsKHR supportedTransforms_ = {}, Bool32 planeReorderPossible_ = {}, Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT : display{ display_ } , displayName{ displayName_ } , physicalDimensions{ physicalDimensions_ } , physicalResolution{ physicalResolution_ } , supportedTransforms{ supportedTransforms_ } , planeReorderPossible{ planeReorderPossible_ } , persistentContent{ 
persistentContent_ } { } VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayPropertiesKHR( *reinterpret_cast( &rhs ) ) { } DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( display, displayName, physicalDimensions, physicalResolution, supportedTransforms, planeReorderPossible, persistentContent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = display <=> rhs.display; cmp != 0 ) return cmp; if ( displayName != rhs.displayName ) if ( auto cmp = strcmp( displayName, rhs.displayName ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = physicalDimensions <=> rhs.physicalDimensions; cmp != 0 ) return cmp; if ( auto cmp = physicalResolution <=> rhs.physicalResolution; cmp != 0 ) return cmp; if ( auto cmp = supportedTransforms <=> rhs.supportedTransforms; cmp != 0 ) return cmp; if ( auto cmp = planeReorderPossible <=> rhs.planeReorderPossible; cmp != 0 ) return cmp; if ( auto cmp = persistentContent <=> rhs.persistentContent; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( display == rhs.display ) && ( ( displayName == rhs.displayName ) || ( strcmp( displayName, rhs.displayName ) == 0 ) ) && ( physicalDimensions == rhs.physicalDimensions ) && ( physicalResolution == rhs.physicalResolution ) && ( supportedTransforms == rhs.supportedTransforms ) && ( planeReorderPossible == rhs.planeReorderPossible ) && ( persistentContent == rhs.persistentContent ); } bool operator!=( DisplayPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: DisplayKHR display = {}; const char * displayName = {}; Extent2D physicalDimensions = {}; Extent2D physicalResolution = {}; SurfaceTransformFlagsKHR supportedTransforms = {}; Bool32 planeReorderPossible = {}; Bool32 persistentContent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayPropertiesKHR; }; #endif // wrapper struct for struct VkDisplayProperties2KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplayProperties2KHR.html struct DisplayProperties2KHR { using NativeType = VkDisplayProperties2KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( 
DisplayPropertiesKHR displayProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displayProperties{ displayProperties_ } { } VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplayProperties2KHR( *reinterpret_cast( &rhs ) ) { } DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDisplayProperties2KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplayProperties2KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplayProperties2KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displayProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplayProperties2KHR const & ) const = default; #else bool operator==( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayProperties == rhs.displayProperties ); # endif } bool operator!=( DisplayProperties2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplayProperties2KHR; void * pNext = {}; DisplayPropertiesKHR displayProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplayProperties2KHR; 
}; #endif template <> struct CppType { using Type = DisplayProperties2KHR; }; // wrapper struct for struct VkDisplaySurfaceCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceCreateInfoKHR.html struct DisplaySurfaceCreateInfoKHR { using NativeType = VkDisplaySurfaceCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateFlagsKHR flags_ = {}, DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, SurfaceTransformFlagBitsKHR transform_ = SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, DisplayPlaneAlphaFlagBitsKHR alphaMode_ = DisplayPlaneAlphaFlagBitsKHR::eOpaque, Extent2D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , displayMode{ displayMode_ } , planeIndex{ planeIndex_ } , planeStackIndex{ planeStackIndex_ } , transform{ transform_ } , globalAlpha{ globalAlpha_ } , alphaMode{ alphaMode_ } , imageExtent{ imageExtent_ } { } VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : DisplaySurfaceCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & 
setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setFlags( DisplaySurfaceCreateFlagsKHR flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setFlags( DisplaySurfaceCreateFlagsKHR flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setDisplayMode( DisplayModeKHR displayMode_ ) & VULKAN_HPP_NOEXCEPT { displayMode = displayMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setDisplayMode( DisplayModeKHR displayMode_ ) && VULKAN_HPP_NOEXCEPT { displayMode = displayMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) & VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setPlaneIndex( uint32_t planeIndex_ ) && VULKAN_HPP_NOEXCEPT { planeIndex = planeIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) & VULKAN_HPP_NOEXCEPT { planeStackIndex = planeStackIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setPlaneStackIndex( uint32_t planeStackIndex_ ) && VULKAN_HPP_NOEXCEPT { planeStackIndex = planeStackIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setTransform( SurfaceTransformFlagBitsKHR transform_ ) & VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setTransform( SurfaceTransformFlagBitsKHR transform_ ) && VULKAN_HPP_NOEXCEPT { transform = transform_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) & VULKAN_HPP_NOEXCEPT { globalAlpha = globalAlpha_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setGlobalAlpha( float globalAlpha_ ) && VULKAN_HPP_NOEXCEPT { globalAlpha = globalAlpha_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) & VULKAN_HPP_NOEXCEPT { alphaMode = alphaMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setAlphaMode( DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) && VULKAN_HPP_NOEXCEPT { alphaMode = alphaMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & setImageExtent( Extent2D const & imageExtent_ ) & VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR && setImageExtent( Extent2D const & imageExtent_ ) && VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplaySurfaceCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplaySurfaceCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplaySurfaceCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, displayMode, planeIndex, planeStackIndex, transform, globalAlpha, alphaMode, imageExtent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DisplaySurfaceCreateInfoKHR const & ) const = default; #else bool operator==( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( displayMode == rhs.displayMode ) && ( planeIndex == rhs.planeIndex ) && ( planeStackIndex == rhs.planeStackIndex ) && ( transform == rhs.transform ) && ( globalAlpha == rhs.globalAlpha ) && ( alphaMode == rhs.alphaMode ) && ( imageExtent == rhs.imageExtent ); # endif } bool operator!=( DisplaySurfaceCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; const void * pNext = {}; DisplaySurfaceCreateFlagsKHR flags = {}; DisplayModeKHR displayMode = {}; uint32_t planeIndex = {}; uint32_t planeStackIndex = {}; SurfaceTransformFlagBitsKHR transform = SurfaceTransformFlagBitsKHR::eIdentity; float globalAlpha = {}; DisplayPlaneAlphaFlagBitsKHR alphaMode = DisplayPlaneAlphaFlagBitsKHR::eOpaque; Extent2D imageExtent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplaySurfaceCreateInfoKHR; }; #endif template <> struct CppType { using Type = DisplaySurfaceCreateInfoKHR; }; // wrapper struct for struct VkDisplaySurfaceStereoCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDisplaySurfaceStereoCreateInfoNV.html struct DisplaySurfaceStereoCreateInfoNV { using NativeType = VkDisplaySurfaceStereoCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceStereoCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoTypeNV stereoType_ = DisplaySurfaceStereoTypeNV::eNone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stereoType{ stereoType_ } { } VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const 
& rhs ) VULKAN_HPP_NOEXCEPT = default; DisplaySurfaceStereoCreateInfoNV( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast( &rhs ) ) { } DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( DisplaySurfaceStereoTypeNV stereoType_ ) & VULKAN_HPP_NOEXCEPT { stereoType = stereoType_; return *this; } VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV && setStereoType( DisplaySurfaceStereoTypeNV stereoType_ ) && VULKAN_HPP_NOEXCEPT { stereoType = stereoType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDisplaySurfaceStereoCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDisplaySurfaceStereoCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stereoType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
DisplaySurfaceStereoCreateInfoNV const & ) const = default; #else bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stereoType == rhs.stereoType ); # endif } bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDisplaySurfaceStereoCreateInfoNV; const void * pNext = {}; DisplaySurfaceStereoTypeNV stereoType = DisplaySurfaceStereoTypeNV::eNone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DisplaySurfaceStereoCreateInfoNV; }; #endif template <> struct CppType { using Type = DisplaySurfaceStereoCreateInfoNV; }; // wrapper struct for struct VkDrawIndexedIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndexedIndirectCommand.html struct DrawIndexedIndirectCommand { using NativeType = VkDrawIndexedIndirectCommand; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT : indexCount{ indexCount_ } , instanceCount{ instanceCount_ } , firstIndex{ firstIndex_ } , vertexOffset{ vertexOffset_ } , firstInstance{ firstInstance_ } { } VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndexedIndirectCommand( *reinterpret_cast( &rhs ) ) { } DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) & VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand && setIndexCount( uint32_t indexCount_ ) && VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) & VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand && setInstanceCount( uint32_t instanceCount_ ) && VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) & VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand && setFirstIndex( uint32_t firstIndex_ ) && VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) & VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand && setVertexOffset( int32_t vertexOffset_ ) && VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) & VULKAN_HPP_NOEXCEPT { firstInstance = firstInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand && setFirstInstance( uint32_t firstInstance_ ) && VULKAN_HPP_NOEXCEPT { firstInstance = firstInstance_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkDrawIndexedIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndexedIndirectCommand const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrawIndexedIndirectCommand *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrawIndexedIndirectCommand const & ) const = default; #else bool operator==( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( indexCount == rhs.indexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstIndex == rhs.firstIndex ) && ( vertexOffset == rhs.vertexOffset ) && ( firstInstance == rhs.firstInstance ); # endif } bool operator!=( DrawIndexedIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t indexCount = {}; uint32_t instanceCount = {}; uint32_t firstIndex = {}; int32_t vertexOffset = {}; uint32_t firstInstance = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrawIndexedIndirectCommand; }; #endif // wrapper struct for struct VkDrawIndirectCommand, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCommand.html struct DrawIndirectCommand { using NativeType = VkDrawIndirectCommand; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT : vertexCount{ 
vertexCount_ } , instanceCount{ instanceCount_ } , firstVertex{ firstVertex_ } , firstInstance{ firstInstance_ } { } VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCommand( *reinterpret_cast( &rhs ) ) { } DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) & VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand && setVertexCount( uint32_t vertexCount_ ) && VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) & VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand && setInstanceCount( uint32_t instanceCount_ ) && VULKAN_HPP_NOEXCEPT { instanceCount = instanceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) & VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand && setFirstVertex( uint32_t firstVertex_ ) && VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) & VULKAN_HPP_NOEXCEPT { firstInstance = firstInstance_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand && setFirstInstance( uint32_t firstInstance_ ) && VULKAN_HPP_NOEXCEPT { firstInstance = 
firstInstance_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDrawIndirectCommand const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndirectCommand const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrawIndirectCommand *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( vertexCount, instanceCount, firstVertex, firstInstance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrawIndirectCommand const & ) const = default; #else bool operator==( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( vertexCount == rhs.vertexCount ) && ( instanceCount == rhs.instanceCount ) && ( firstVertex == rhs.firstVertex ) && ( firstInstance == rhs.firstInstance ); # endif } bool operator!=( DrawIndirectCommand const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t vertexCount = {}; uint32_t instanceCount = {}; uint32_t firstVertex = {}; uint32_t firstInstance = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrawIndirectCommand; }; #endif // wrapper struct for struct VkDrawIndirectCountIndirectCommandEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawIndirectCountIndirectCommandEXT.html struct DrawIndirectCountIndirectCommandEXT { using NativeType = VkDrawIndirectCountIndirectCommandEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DeviceAddress bufferAddress_ = {}, uint32_t stride_ = {}, uint32_t commandCount_ = {} ) VULKAN_HPP_NOEXCEPT : 
bufferAddress{ bufferAddress_ } , stride{ stride_ } , commandCount{ commandCount_ } { } VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrawIndirectCountIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( DeviceAddress bufferAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT && setBufferAddress( DeviceAddress bufferAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferAddress = bufferAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT && setStride( uint32_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) & VULKAN_HPP_NOEXCEPT { commandCount = commandCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT && setCommandCount( uint32_t commandCount_ ) && VULKAN_HPP_NOEXCEPT { commandCount = commandCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDrawIndirectCountIndirectCommandEXT const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawIndirectCountIndirectCommandEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrawIndirectCountIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( bufferAddress, stride, commandCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default; #else bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( bufferAddress == rhs.bufferAddress ) && ( stride == rhs.stride ) && ( commandCount == rhs.commandCount ); # endif } bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceAddress bufferAddress = {}; uint32_t stride = {}; uint32_t commandCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrawIndirectCountIndirectCommandEXT; }; #endif // wrapper struct for struct VkDrawMeshTasksIndirectCommandEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandEXT.html struct DrawMeshTasksIndirectCommandEXT { using NativeType = VkDrawMeshTasksIndirectCommandEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT : groupCountX{ groupCountX_ } , groupCountY{ groupCountY_ } , groupCountZ{ groupCountZ_ } { } VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( 
DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrawMeshTasksIndirectCommandEXT( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrawMeshTasksIndirectCommandEXT( *reinterpret_cast( &rhs ) ) { } DrawMeshTasksIndirectCommandEXT & operator=( DrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrawMeshTasksIndirectCommandEXT & operator=( VkDrawMeshTasksIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountX( uint32_t groupCountX_ ) & VULKAN_HPP_NOEXCEPT { groupCountX = groupCountX_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT && setGroupCountX( uint32_t groupCountX_ ) && VULKAN_HPP_NOEXCEPT { groupCountX = groupCountX_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountY( uint32_t groupCountY_ ) & VULKAN_HPP_NOEXCEPT { groupCountY = groupCountY_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT && setGroupCountY( uint32_t groupCountY_ ) && VULKAN_HPP_NOEXCEPT { groupCountY = groupCountY_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT & setGroupCountZ( uint32_t groupCountZ_ ) & VULKAN_HPP_NOEXCEPT { groupCountZ = groupCountZ_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandEXT && setGroupCountZ( uint32_t groupCountZ_ ) && VULKAN_HPP_NOEXCEPT { groupCountZ = groupCountZ_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDrawMeshTasksIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandEXT const 
*() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( groupCountX, groupCountY, groupCountZ ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrawMeshTasksIndirectCommandEXT const & ) const = default; #else bool operator==( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( groupCountX == rhs.groupCountX ) && ( groupCountY == rhs.groupCountY ) && ( groupCountZ == rhs.groupCountZ ); # endif } bool operator!=( DrawMeshTasksIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t groupCountX = {}; uint32_t groupCountY = {}; uint32_t groupCountZ = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrawMeshTasksIndirectCommandEXT; }; #endif // wrapper struct for struct VkDrawMeshTasksIndirectCommandNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrawMeshTasksIndirectCommandNV.html struct DrawMeshTasksIndirectCommandNV { using NativeType = VkDrawMeshTasksIndirectCommandNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT : taskCount{ taskCount_ } , firstTask{ firstTask_ } { } VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT : DrawMeshTasksIndirectCommandNV( *reinterpret_cast( &rhs ) ) { } DrawMeshTasksIndirectCommandNV & operator=( 
DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) & VULKAN_HPP_NOEXCEPT { taskCount = taskCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV && setTaskCount( uint32_t taskCount_ ) && VULKAN_HPP_NOEXCEPT { taskCount = taskCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) & VULKAN_HPP_NOEXCEPT { firstTask = firstTask_; return *this; } VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV && setFirstTask( uint32_t firstTask_ ) && VULKAN_HPP_NOEXCEPT { firstTask = firstTask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkDrawMeshTasksIndirectCommandNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrawMeshTasksIndirectCommandNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( taskCount, firstTask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrawMeshTasksIndirectCommandNV const & ) const = default; #else bool operator==( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( taskCount == rhs.taskCount ) && ( firstTask == 
rhs.firstTask ); # endif } bool operator!=( DrawMeshTasksIndirectCommandNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t taskCount = {}; uint32_t firstTask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrawMeshTasksIndirectCommandNV; }; #endif // wrapper struct for struct VkDrmFormatModifierProperties2EXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierProperties2EXT.html struct DrmFormatModifierProperties2EXT { using NativeType = VkDrmFormatModifierProperties2EXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifier{ drmFormatModifier_ } , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } { } VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierProperties2EXT( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrmFormatModifierProperties2EXT( *reinterpret_cast( &rhs ) ) { } DrmFormatModifierProperties2EXT & operator=( DrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrmFormatModifierProperties2EXT & operator=( VkDrmFormatModifierProperties2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDrmFormatModifierProperties2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierProperties2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierProperties2EXT const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrmFormatModifierProperties2EXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrmFormatModifierProperties2EXT const & ) const = default; #else bool operator==( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); # endif } bool operator!=( DrmFormatModifierProperties2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint64_t drmFormatModifier = {}; uint32_t drmFormatModifierPlaneCount = {}; FormatFeatureFlags2 drmFormatModifierTilingFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrmFormatModifierProperties2EXT; }; #endif // wrapper struct for struct VkDrmFormatModifierPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesEXT.html struct DrmFormatModifierPropertiesEXT { using NativeType = VkDrmFormatModifierPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : drmFormatModifier{ drmFormatModifier_ } , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } , drmFormatModifierTilingFeatures{ 
drmFormatModifierTilingFeatures_ } { } VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) { } DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( drmFormatModifier, drmFormatModifierPlaneCount, drmFormatModifierTilingFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrmFormatModifierPropertiesEXT const & ) const = default; #else bool operator==( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); # endif } bool operator!=( DrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint64_t drmFormatModifier = {}; uint32_t 
drmFormatModifierPlaneCount = {}; FormatFeatureFlags drmFormatModifierTilingFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrmFormatModifierPropertiesEXT; }; #endif // wrapper struct for struct VkDrmFormatModifierPropertiesList2EXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesList2EXT.html struct DrmFormatModifierPropertiesList2EXT { using NativeType = VkDrmFormatModifierPropertiesList2EXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesList2EXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifierCount{ drmFormatModifierCount_ } , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierPropertiesList2EXT( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesList2EXT( *reinterpret_cast( &rhs ) ) { } DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrmFormatModifierPropertiesList2EXT & operator=( VkDrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDrmFormatModifierPropertiesList2EXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesList2EXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkDrmFormatModifierPropertiesList2EXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesList2EXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrmFormatModifierPropertiesList2EXT const & ) const = default; #else bool operator==( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); # endif } bool operator!=( DrmFormatModifierPropertiesList2EXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDrmFormatModifierPropertiesList2EXT; void * pNext = {}; uint32_t drmFormatModifierCount = {}; DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrmFormatModifierPropertiesList2EXT; }; #endif template <> struct CppType { using Type = DrmFormatModifierPropertiesList2EXT; }; // wrapper struct for struct VkDrmFormatModifierPropertiesListEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkDrmFormatModifierPropertiesListEXT.html struct DrmFormatModifierPropertiesListEXT { using NativeType = VkDrmFormatModifierPropertiesListEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifierCount{ drmFormatModifierCount_ } , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DrmFormatModifierPropertiesListEXT( *reinterpret_cast( &rhs ) ) { } DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkDrmFormatModifierPropertiesListEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesListEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkDrmFormatModifierPropertiesListEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifierProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DrmFormatModifierPropertiesListEXT const & ) const = default; #else bool operator==( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); # endif } bool operator!=( DrmFormatModifierPropertiesListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; void * pNext = {}; uint32_t drmFormatModifierCount = {}; DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = DrmFormatModifierPropertiesListEXT; }; #endif template <> struct CppType { using Type = DrmFormatModifierPropertiesListEXT; }; // wrapper struct for struct VkEventCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkEventCreateInfo.html struct EventCreateInfo { using NativeType = VkEventCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : EventCreateInfo( *reinterpret_cast( &rhs ) ) {} EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 EventCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & setFlags( EventCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 EventCreateInfo && setFlags( EventCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkEventCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkEventCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkEventCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( EventCreateInfo const & ) const = default; #else bool operator==( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( EventCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eEventCreateInfo; const void * pNext = {}; EventCreateFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = EventCreateInfo; }; #endif template <> struct CppType { using Type = EventCreateInfo; }; // wrapper struct for struct VkPipelineLibraryCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineLibraryCreateInfoKHR.html struct PipelineLibraryCreateInfoKHR { using NativeType = VkPipelineLibraryCreateInfoKHR; static 
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, const Pipeline * pLibraries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , libraryCount{ libraryCount_ } , pLibraries{ pLibraries_ } { } VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineLibraryCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineLibraryCreateInfoKHR( ArrayProxyNoTemporaries const & libraries_, const void * pNext_ = nullptr ) : pNext( pNext_ ), libraryCount( static_cast( libraries_.size() ) ), pLibraries( libraries_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) & VULKAN_HPP_NOEXCEPT { libraryCount = libraryCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR && 
setLibraryCount( uint32_t libraryCount_ ) && VULKAN_HPP_NOEXCEPT { libraryCount = libraryCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & setPLibraries( const Pipeline * pLibraries_ ) & VULKAN_HPP_NOEXCEPT { pLibraries = pLibraries_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR && setPLibraries( const Pipeline * pLibraries_ ) && VULKAN_HPP_NOEXCEPT { pLibraries = pLibraries_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineLibraryCreateInfoKHR & setLibraries( ArrayProxyNoTemporaries const & libraries_ ) VULKAN_HPP_NOEXCEPT { libraryCount = static_cast( libraries_.size() ); pLibraries = libraries_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineLibraryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineLibraryCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineLibraryCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, libraryCount, pLibraries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineLibraryCreateInfoKHR const & ) const = default; #else bool operator==( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( libraryCount == rhs.libraryCount ) && ( pLibraries == rhs.pLibraries ); # endif } bool operator!=( PipelineLibraryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePipelineLibraryCreateInfoKHR; const void * pNext = {}; uint32_t libraryCount = {}; const Pipeline * pLibraries = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineLibraryCreateInfoKHR; }; #endif template <> struct CppType { using Type = PipelineLibraryCreateInfoKHR; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkExecutionGraphPipelineCreateInfoAMDX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineCreateInfoAMDX.html struct ExecutionGraphPipelineCreateInfoAMDX { using NativeType = VkExecutionGraphPipelineCreateInfoAMDX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const PipelineShaderStageCreateInfo * pStages_ = {}, const PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, PipelineLayout layout_ = {}, Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , stageCount{ stageCount_ } , pStages{ pStages_ } , pLibraryInfo{ pLibraryInfo_ } , layout{ layout_ } , basePipelineHandle{ basePipelineHandle_ } , basePipelineIndex{ basePipelineIndex_ } { } VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineCreateInfoAMDX( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExecutionGraphPipelineCreateInfoAMDX( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : ExecutionGraphPipelineCreateInfoAMDX( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ExecutionGraphPipelineCreateInfoAMDX( PipelineCreateFlags flags_, ArrayProxyNoTemporaries const & stages_, const 
PipelineLibraryCreateInfoKHR * pLibraryInfo_ = {}, PipelineLayout layout_ = {}, Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , stageCount( static_cast( stages_.size() ) ) , pStages( stages_.data() ) , pLibraryInfo( pLibraryInfo_ ) , layout( layout_ ) , basePipelineHandle( basePipelineHandle_ ) , basePipelineIndex( basePipelineIndex_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ExecutionGraphPipelineCreateInfoAMDX & operator=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExecutionGraphPipelineCreateInfoAMDX & operator=( VkExecutionGraphPipelineCreateInfoAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setFlags( PipelineCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setFlags( PipelineCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setStageCount( uint32_t stageCount_ ) & VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setStageCount( uint32_t stageCount_ ) && VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPStages( const 
PipelineShaderStageCreateInfo * pStages_ ) & VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setPStages( const PipelineShaderStageCreateInfo * pStages_ ) && VULKAN_HPP_NOEXCEPT { pStages = pStages_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ExecutionGraphPipelineCreateInfoAMDX & setStages( ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { stageCount = static_cast( stages_.size() ); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setPLibraryInfo( const PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) & VULKAN_HPP_NOEXCEPT { pLibraryInfo = pLibraryInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setPLibraryInfo( const PipelineLibraryCreateInfoKHR * pLibraryInfo_ ) && VULKAN_HPP_NOEXCEPT { pLibraryInfo = pLibraryInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineHandle( Pipeline basePipelineHandle_ ) & VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX && setBasePipelineHandle( Pipeline basePipelineHandle_ ) && VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineCreateInfoAMDX & setBasePipelineIndex( int32_t basePipelineIndex_ ) & VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ExecutionGraphPipelineCreateInfoAMDX && setBasePipelineIndex( int32_t basePipelineIndex_ ) && VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExecutionGraphPipelineCreateInfoAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExecutionGraphPipelineCreateInfoAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExecutionGraphPipelineCreateInfoAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExecutionGraphPipelineCreateInfoAMDX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, stageCount, pStages, pLibraryInfo, layout, basePipelineHandle, basePipelineIndex ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExecutionGraphPipelineCreateInfoAMDX const & ) const = default; # else bool operator==( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( pLibraryInfo == rhs.pLibraryInfo ) && ( layout == rhs.layout ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex ); # endif } bool operator!=( ExecutionGraphPipelineCreateInfoAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExecutionGraphPipelineCreateInfoAMDX; const void * pNext = {}; PipelineCreateFlags flags = {}; uint32_t stageCount = {}; const PipelineShaderStageCreateInfo * pStages = {}; const PipelineLibraryCreateInfoKHR * pLibraryInfo = {}; PipelineLayout layout = {}; Pipeline 
basePipelineHandle = {}; int32_t basePipelineIndex = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExecutionGraphPipelineCreateInfoAMDX; }; # endif template <> struct CppType { using Type = ExecutionGraphPipelineCreateInfoAMDX; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkExecutionGraphPipelineScratchSizeAMDX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExecutionGraphPipelineScratchSizeAMDX.html struct ExecutionGraphPipelineScratchSizeAMDX { using NativeType = VkExecutionGraphPipelineScratchSizeAMDX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( DeviceSize minSize_ = {}, DeviceSize maxSize_ = {}, DeviceSize sizeGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minSize{ minSize_ } , maxSize{ maxSize_ } , sizeGranularity{ sizeGranularity_ } { } VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExecutionGraphPipelineScratchSizeAMDX( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : ExecutionGraphPipelineScratchSizeAMDX( *reinterpret_cast( &rhs ) ) { } ExecutionGraphPipelineScratchSizeAMDX & operator=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExecutionGraphPipelineScratchSizeAMDX & operator=( VkExecutionGraphPipelineScratchSizeAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
ExecutionGraphPipelineScratchSizeAMDX & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( DeviceSize minSize_ ) & VULKAN_HPP_NOEXCEPT { minSize = minSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX && setMinSize( DeviceSize minSize_ ) && VULKAN_HPP_NOEXCEPT { minSize = minSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( DeviceSize maxSize_ ) & VULKAN_HPP_NOEXCEPT { maxSize = maxSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX && setMaxSize( DeviceSize maxSize_ ) && VULKAN_HPP_NOEXCEPT { maxSize = maxSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( DeviceSize sizeGranularity_ ) & VULKAN_HPP_NOEXCEPT { sizeGranularity = sizeGranularity_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX && setSizeGranularity( DeviceSize sizeGranularity_ ) && VULKAN_HPP_NOEXCEPT { sizeGranularity = sizeGranularity_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExecutionGraphPipelineScratchSizeAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExecutionGraphPipelineScratchSizeAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExecutionGraphPipelineScratchSizeAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExecutionGraphPipelineScratchSizeAMDX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minSize, maxSize, sizeGranularity ); } # 
endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExecutionGraphPipelineScratchSizeAMDX const & ) const = default; # else bool operator==( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSize == rhs.minSize ) && ( maxSize == rhs.maxSize ) && ( sizeGranularity == rhs.sizeGranularity ); # endif } bool operator!=( ExecutionGraphPipelineScratchSizeAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; void * pNext = {}; DeviceSize minSize = {}; DeviceSize maxSize = {}; DeviceSize sizeGranularity = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExecutionGraphPipelineScratchSizeAMDX; }; # endif template <> struct CppType { using Type = ExecutionGraphPipelineScratchSizeAMDX; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkExportFenceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceCreateInfo.html struct ExportFenceCreateInfo { using NativeType = VkExportFenceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExternalFenceHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ExportFenceCreateInfo( *reinterpret_cast( &rhs ) ) { } 
ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo && setHandleTypes( ExternalFenceHandleTypeFlags handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportFenceCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportFenceCreateInfo const & ) const = default; #else bool operator==( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool 
operator!=( ExportFenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExportFenceCreateInfo; const void * pNext = {}; ExternalFenceHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportFenceCreateInfo; }; #endif template <> struct CppType { using Type = ExportFenceCreateInfo; }; using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkExportFenceWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportFenceWin32HandleInfoKHR.html struct ExportFenceWin32HandleInfoKHR { using NativeType = VkExportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pAttributes{ pAttributes_ } , dwAccess{ dwAccess_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ExportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) & VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR && setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) && VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) & VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR && setDwAccess( DWORD dwAccess_ ) && VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pAttributes, dwAccess, name ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( ExportFenceWin32HandleInfoKHR const & ) const = default; # else bool operator==( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); # endif } bool operator!=( ExportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; const void * pNext = {}; const SECURITY_ATTRIBUTES * pAttributes = {}; DWORD dwAccess = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportFenceWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ExportFenceWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ // wrapper struct for struct VkExportMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfo.html struct ExportMemoryAllocateInfo { using NativeType = VkExportMemoryAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMemoryAllocateInfo( *reinterpret_cast( &rhs ) ) { } ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo && setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMemoryAllocateInfo const & ) const = default; #else bool operator==( ExportMemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExportMemoryAllocateInfo const & rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExportMemoryAllocateInfo; const void * pNext = {}; ExternalMemoryHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMemoryAllocateInfo; }; #endif template <> struct CppType { using Type = ExportMemoryAllocateInfo; }; using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; // wrapper struct for struct VkExportMemoryAllocateInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryAllocateInfoNV.html struct ExportMemoryAllocateInfoNV { using NativeType = VkExportMemoryAllocateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMemoryAllocateInfoNV( *reinterpret_cast( &rhs ) ) { } ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV 
&& setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV && setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMemoryAllocateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryAllocateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMemoryAllocateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMemoryAllocateInfoNV const & ) const = default; #else bool operator==( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExportMemoryAllocateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExportMemoryAllocateInfoNV; const void * pNext = {}; ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMemoryAllocateInfoNV; }; #endif template <> struct CppType { using Type = ExportMemoryAllocateInfoNV; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // 
wrapper struct for struct VkExportMemoryWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoKHR.html struct ExportMemoryWin32HandleInfoKHR { using NativeType = VkExportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pAttributes{ pAttributes_ } , dwAccess{ dwAccess_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) & VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR && setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) && VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) & VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR && setDwAccess( DWORD dwAccess_ ) && VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pAttributes, dwAccess, name ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMemoryWin32HandleInfoKHR const & ) const = default; # else bool operator==( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); # endif } bool operator!=( ExportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; const void * pNext = {}; const SECURITY_ATTRIBUTES * pAttributes = {}; DWORD dwAccess = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMemoryWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ExportMemoryWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkExportMemoryWin32HandleInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMemoryWin32HandleInfoNV.html struct ExportMemoryWin32HandleInfoNV { using NativeType = VkExportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pAttributes{ pAttributes_ } , dwAccess{ dwAccess_ } { } VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) { } ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
ExportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) & VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV && setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) && VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) & VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV && setDwAccess( DWORD dwAccess_ ) && VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pAttributes, dwAccess ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMemoryWin32HandleInfoNV const & ) const = default; # else bool operator==( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pAttributes == rhs.pAttributes ) && 
( dwAccess == rhs.dwAccess ); # endif } bool operator!=( ExportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; const void * pNext = {}; const SECURITY_ATTRIBUTES * pAttributes = {}; DWORD dwAccess = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMemoryWin32HandleInfoNV; }; # endif template <> struct CppType { using Type = ExportMemoryWin32HandleInfoNV; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalBufferInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalBufferInfoEXT.html struct ExportMetalBufferInfoEXT { using NativeType = VkExportMetalBufferInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalBufferInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( DeviceMemory memory_ = {}, MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , mtlBuffer{ mtlBuffer_ } { } VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalBufferInfoEXT( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalBufferInfoEXT & operator=( ExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalBufferInfoEXT & operator=( VkExportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
ExportMetalBufferInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) & VULKAN_HPP_NOEXCEPT { mtlBuffer = mtlBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalBufferInfoEXT && setMtlBuffer( MTLBuffer_id mtlBuffer_ ) && VULKAN_HPP_NOEXCEPT { mtlBuffer = mtlBuffer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, mtlBuffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalBufferInfoEXT const & ) const = default; # else bool operator==( ExportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( mtlBuffer == rhs.mtlBuffer ); # endif } bool operator!=( ExportMetalBufferInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalBufferInfoEXT; const void * pNext = {}; DeviceMemory memory = {}; MTLBuffer_id mtlBuffer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalBufferInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalBufferInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalCommandQueueInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalCommandQueueInfoEXT.html struct ExportMetalCommandQueueInfoEXT { using NativeType = VkExportMetalCommandQueueInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalCommandQueueInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( Queue queue_ = {}, MTLCommandQueue_id mtlCommandQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , queue{ queue_ } , mtlCommandQueue{ mtlCommandQueue_ } { } VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalCommandQueueInfoEXT( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalCommandQueueInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalCommandQueueInfoEXT & operator=( ExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalCommandQueueInfoEXT & operator=( VkExportMetalCommandQueueInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setPNext( 
const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setQueue( Queue queue_ ) & VULKAN_HPP_NOEXCEPT { queue = queue_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT && setQueue( Queue queue_ ) && VULKAN_HPP_NOEXCEPT { queue = queue_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT & setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) & VULKAN_HPP_NOEXCEPT { mtlCommandQueue = mtlCommandQueue_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalCommandQueueInfoEXT && setMtlCommandQueue( MTLCommandQueue_id mtlCommandQueue_ ) && VULKAN_HPP_NOEXCEPT { mtlCommandQueue = mtlCommandQueue_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalCommandQueueInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalCommandQueueInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalCommandQueueInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalCommandQueueInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, queue, mtlCommandQueue ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalCommandQueueInfoEXT const & ) const = default; # else bool operator==( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queue == rhs.queue ) && ( mtlCommandQueue == rhs.mtlCommandQueue ); # endif 
} bool operator!=( ExportMetalCommandQueueInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalCommandQueueInfoEXT; const void * pNext = {}; Queue queue = {}; MTLCommandQueue_id mtlCommandQueue = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalCommandQueueInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalCommandQueueInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalDeviceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalDeviceInfoEXT.html struct ExportMetalDeviceInfoEXT { using NativeType = VkExportMetalDeviceInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalDeviceInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mtlDevice{ mtlDevice_ } { } VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalDeviceInfoEXT( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalDeviceInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalDeviceInfoEXT & operator=( ExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalDeviceInfoEXT & operator=( VkExportMetalDeviceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT & setMtlDevice( MTLDevice_id mtlDevice_ ) & VULKAN_HPP_NOEXCEPT { mtlDevice = mtlDevice_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalDeviceInfoEXT && setMtlDevice( MTLDevice_id mtlDevice_ ) && VULKAN_HPP_NOEXCEPT { mtlDevice = mtlDevice_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalDeviceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalDeviceInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalDeviceInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalDeviceInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mtlDevice ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalDeviceInfoEXT const & ) const = default; # else bool operator==( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlDevice == rhs.mtlDevice ); # endif } bool operator!=( ExportMetalDeviceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalDeviceInfoEXT; const void * pNext = {}; MTLDevice_id mtlDevice = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalDeviceInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalDeviceInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper 
struct for struct VkExportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalIOSurfaceInfoEXT.html struct ExportMetalIOSurfaceInfoEXT { using NativeType = VkExportMetalIOSurfaceInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalIoSurfaceInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , ioSurface{ ioSurface_ } { } VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalIOSurfaceInfoEXT( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalIOSurfaceInfoEXT & operator=( ExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalIOSurfaceInfoEXT & operator=( VkExportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ExportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) & VULKAN_HPP_NOEXCEPT { ioSurface = ioSurface_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalIOSurfaceInfoEXT && setIoSurface( IOSurfaceRef ioSurface_ ) && VULKAN_HPP_NOEXCEPT { ioSurface = ioSurface_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, ioSurface ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalIOSurfaceInfoEXT const & ) const = default; # else bool operator==( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( ioSurface == rhs.ioSurface ); # endif } bool operator!=( ExportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalIoSurfaceInfoEXT; const void * pNext = {}; Image image = {}; IOSurfaceRef ioSurface = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalIOSurfaceInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalIOSurfaceInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalObjectCreateInfoEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectCreateInfoEXT.html struct ExportMetalObjectCreateInfoEXT { using NativeType = VkExportMetalObjectCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectCreateInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , exportObjectType{ exportObjectType_ } { } VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalObjectCreateInfoEXT( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalObjectCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalObjectCreateInfoEXT & operator=( ExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalObjectCreateInfoEXT & operator=( VkExportMetalObjectCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT & setExportObjectType( ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) & VULKAN_HPP_NOEXCEPT { exportObjectType = exportObjectType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectCreateInfoEXT && setExportObjectType( 
ExportMetalObjectTypeFlagBitsEXT exportObjectType_ ) && VULKAN_HPP_NOEXCEPT { exportObjectType = exportObjectType_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalObjectCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalObjectCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalObjectCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalObjectCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, exportObjectType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalObjectCreateInfoEXT const & ) const = default; # else bool operator==( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportObjectType == rhs.exportObjectType ); # endif } bool operator!=( ExportMetalObjectCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalObjectCreateInfoEXT; const void * pNext = {}; ExportMetalObjectTypeFlagBitsEXT exportObjectType = ExportMetalObjectTypeFlagBitsEXT::eMetalDevice; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalObjectCreateInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalObjectCreateInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalObjectsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalObjectsInfoEXT.html struct ExportMetalObjectsInfoEXT { using 
NativeType = VkExportMetalObjectsInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalObjectsInfoEXT( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalObjectsInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalObjectsInfoEXT & operator=( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalObjectsInfoEXT & operator=( VkExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalObjectsInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalObjectsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalObjectsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalObjectsInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalObjectsInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( ExportMetalObjectsInfoEXT const & ) const = default; # else bool operator==( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( ExportMetalObjectsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalObjectsInfoEXT; const void * pNext = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalObjectsInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalObjectsInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalSharedEventInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalSharedEventInfoEXT.html struct ExportMetalSharedEventInfoEXT { using NativeType = VkExportMetalSharedEventInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalSharedEventInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( Semaphore semaphore_ = {}, Event event_ = {}, MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , semaphore{ semaphore_ } , event{ event_ } , mtlSharedEvent{ mtlSharedEvent_ } { } VULKAN_HPP_CONSTEXPR ExportMetalSharedEventInfoEXT( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalSharedEventInfoEXT( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalSharedEventInfoEXT & operator=( ExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalSharedEventInfoEXT & operator=( VkExportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setSemaphore( Semaphore semaphore_ ) & VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT && setSemaphore( Semaphore semaphore_ ) && VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setEvent( Event event_ ) & VULKAN_HPP_NOEXCEPT { event = event_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT && setEvent( Event event_ ) && VULKAN_HPP_NOEXCEPT { event = event_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) & VULKAN_HPP_NOEXCEPT { mtlSharedEvent = mtlSharedEvent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalSharedEventInfoEXT && setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) && VULKAN_HPP_NOEXCEPT { mtlSharedEvent = mtlSharedEvent_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkExportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, semaphore, event, mtlSharedEvent ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalSharedEventInfoEXT const & ) const = default; # else bool operator==( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( event == rhs.event ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); # endif } bool operator!=( ExportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalSharedEventInfoEXT; const void * pNext = {}; Semaphore semaphore = {}; Event event = {}; MTLSharedEvent_id mtlSharedEvent = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalSharedEventInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalSharedEventInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkExportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportMetalTextureInfoEXT.html struct ExportMetalTextureInfoEXT { using NativeType = VkExportMetalTextureInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalTextureInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( Image image_ = {}, ImageView imageView_ = {}, BufferView bufferView_ = {}, ImageAspectFlagBits plane_ = ImageAspectFlagBits::eColor, MTLTexture_id 
mtlTexture_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , imageView{ imageView_ } , bufferView{ bufferView_ } , plane{ plane_ } , mtlTexture{ mtlTexture_ } { } VULKAN_HPP_CONSTEXPR ExportMetalTextureInfoEXT( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportMetalTextureInfoEXT( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) { } ExportMetalTextureInfoEXT & operator=( ExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportMetalTextureInfoEXT & operator=( VkExportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setImageView( ImageView imageView_ ) & VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setImageView( ImageView imageView_ ) && VULKAN_HPP_NOEXCEPT { imageView = imageView_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setBufferView( BufferView bufferView_ ) & VULKAN_HPP_NOEXCEPT { bufferView = bufferView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setBufferView( BufferView 
bufferView_ ) && VULKAN_HPP_NOEXCEPT { bufferView = bufferView_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setPlane( ImageAspectFlagBits plane_ ) & VULKAN_HPP_NOEXCEPT { plane = plane_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setPlane( ImageAspectFlagBits plane_ ) && VULKAN_HPP_NOEXCEPT { plane = plane_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) & VULKAN_HPP_NOEXCEPT { mtlTexture = mtlTexture_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportMetalTextureInfoEXT && setMtlTexture( MTLTexture_id mtlTexture_ ) && VULKAN_HPP_NOEXCEPT { mtlTexture = mtlTexture_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, imageView, bufferView, plane, mtlTexture ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportMetalTextureInfoEXT const & ) const = default; # else bool operator==( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( imageView == rhs.imageView ) && ( bufferView == rhs.bufferView ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); # endif } bool operator!=( ExportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportMetalTextureInfoEXT; const void * pNext = {}; Image image = {}; ImageView imageView = {}; BufferView bufferView = {}; ImageAspectFlagBits plane = ImageAspectFlagBits::eColor; MTLTexture_id mtlTexture = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportMetalTextureInfoEXT; }; # endif template <> struct CppType { using Type = ExportMetalTextureInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ // wrapper struct for struct VkExportSemaphoreCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreCreateInfo.html struct ExportSemaphoreCreateInfo { using NativeType = VkExportSemaphoreCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ExportSemaphoreCreateInfo( *reinterpret_cast( &rhs ) ) { } ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo && setHandleTypes( ExternalSemaphoreHandleTypeFlags handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportSemaphoreCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportSemaphoreCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportSemaphoreCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportSemaphoreCreateInfo const & ) const = default; #else bool operator==( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExportSemaphoreCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExportSemaphoreCreateInfo; const void * pNext = {}; ExternalSemaphoreHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportSemaphoreCreateInfo; }; #endif template <> struct CppType { using Type = ExportSemaphoreCreateInfo; }; using 
ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkExportSemaphoreWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExportSemaphoreWin32HandleInfoKHR.html struct ExportSemaphoreWin32HandleInfoKHR { using NativeType = VkExportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pAttributes{ pAttributes_ } , dwAccess{ dwAccess_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) & VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR && setPAttributes( const SECURITY_ATTRIBUTES * pAttributes_ ) && VULKAN_HPP_NOEXCEPT { pAttributes = pAttributes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) & VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR && setDwAccess( DWORD dwAccess_ ) && VULKAN_HPP_NOEXCEPT { dwAccess = dwAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pAttributes, dwAccess, name ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const & ) const = default; # else bool operator==( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
pAttributes == rhs.pAttributes ) && ( dwAccess == rhs.dwAccess ) && ( name == rhs.name ); # endif } bool operator!=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; const void * pNext = {}; const SECURITY_ATTRIBUTES * pAttributes = {}; DWORD dwAccess = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExportSemaphoreWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ExportSemaphoreWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ // wrapper struct for struct VkExtensionProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExtensionProperties.html struct ExtensionProperties { using NativeType = VkExtensionProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array const & extensionName_ = {}, uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT : extensionName{ extensionName_ } , specVersion{ specVersion_ } { } VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExtensionProperties( *reinterpret_cast( &rhs ) ) { } ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExtensionProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExtensionProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } 
operator VkExtensionProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( extensionName, specVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = strcmp( extensionName, rhs.extensionName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( strcmp( extensionName, rhs.extensionName ) == 0 ) && ( specVersion == rhs.specVersion ); } bool operator!=( ExtensionProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: ArrayWrapper1D extensionName = {}; uint32_t specVersion = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExtensionProperties; }; #endif // wrapper struct for struct VkExternalMemoryProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryProperties.html struct ExternalMemoryProperties { using NativeType = VkExternalMemoryProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : externalMemoryFeatures{ externalMemoryFeatures_ } , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } , compatibleHandleTypes{ compatibleHandleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalMemoryProperties( *reinterpret_cast( &rhs ) ) { } ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryProperties const & ) const = default; #else bool operator==( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); # endif } bool operator!=( ExternalMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ExternalMemoryFeatureFlags externalMemoryFeatures = {}; ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {}; ExternalMemoryHandleTypeFlags compatibleHandleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryProperties; }; #endif using 
ExternalMemoryPropertiesKHR = ExternalMemoryProperties; // wrapper struct for struct VkExternalBufferProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalBufferProperties.html struct ExternalBufferProperties { using NativeType = VkExternalBufferProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalMemoryProperties{ externalMemoryProperties_ } { } VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalBufferProperties( *reinterpret_cast( &rhs ) ) { } ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalBufferProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalBufferProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalBufferProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalMemoryProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalBufferProperties const & ) const = 
default; #else bool operator==( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); # endif } bool operator!=( ExternalBufferProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalBufferProperties; void * pNext = {}; ExternalMemoryProperties externalMemoryProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalBufferProperties; }; #endif template <> struct CppType { using Type = ExternalBufferProperties; }; using ExternalBufferPropertiesKHR = ExternalBufferProperties; // wrapper struct for struct VkExternalComputeQueueCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueCreateInfoNV.html struct ExternalComputeQueueCreateInfoNV { using NativeType = VkExternalComputeQueueCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( Queue preferredQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , preferredQueue{ preferredQueue_ } { } VULKAN_HPP_CONSTEXPR ExternalComputeQueueCreateInfoNV( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalComputeQueueCreateInfoNV( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalComputeQueueCreateInfoNV( *reinterpret_cast( &rhs ) ) { } ExternalComputeQueueCreateInfoNV & operator=( ExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalComputeQueueCreateInfoNV & operator=( VkExternalComputeQueueCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV & setPreferredQueue( Queue preferredQueue_ ) & VULKAN_HPP_NOEXCEPT { preferredQueue = preferredQueue_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueCreateInfoNV && setPreferredQueue( Queue preferredQueue_ ) && VULKAN_HPP_NOEXCEPT { preferredQueue = preferredQueue_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalComputeQueueCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalComputeQueueCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, preferredQueue ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalComputeQueueCreateInfoNV const & ) const = default; #else bool operator==( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredQueue == rhs.preferredQueue ); # 
endif } bool operator!=( ExternalComputeQueueCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalComputeQueueCreateInfoNV; const void * pNext = {}; Queue preferredQueue = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalComputeQueueCreateInfoNV; }; #endif template <> struct CppType { using Type = ExternalComputeQueueCreateInfoNV; }; // wrapper struct for struct VkExternalComputeQueueDataParamsNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDataParamsNV.html struct ExternalComputeQueueDataParamsNV { using NativeType = VkExternalComputeQueueDataParamsNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueDataParamsNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceIndex{ deviceIndex_ } { } VULKAN_HPP_CONSTEXPR ExternalComputeQueueDataParamsNV( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalComputeQueueDataParamsNV( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalComputeQueueDataParamsNV( *reinterpret_cast( &rhs ) ) { } ExternalComputeQueueDataParamsNV & operator=( ExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalComputeQueueDataParamsNV & operator=( VkExternalComputeQueueDataParamsNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setPNext( const void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV & setDeviceIndex( uint32_t deviceIndex_ ) & VULKAN_HPP_NOEXCEPT { deviceIndex = deviceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDataParamsNV && setDeviceIndex( uint32_t deviceIndex_ ) && VULKAN_HPP_NOEXCEPT { deviceIndex = deviceIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalComputeQueueDataParamsNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueDataParamsNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueDataParamsNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalComputeQueueDataParamsNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalComputeQueueDataParamsNV const & ) const = default; #else bool operator==( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceIndex == rhs.deviceIndex ); # endif } bool operator!=( ExternalComputeQueueDataParamsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalComputeQueueDataParamsNV; const void * pNext = {}; uint32_t deviceIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalComputeQueueDataParamsNV; }; #endif template <> 
struct CppType { using Type = ExternalComputeQueueDataParamsNV; }; // wrapper struct for struct VkExternalComputeQueueDeviceCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalComputeQueueDeviceCreateInfoNV.html struct ExternalComputeQueueDeviceCreateInfoNV { using NativeType = VkExternalComputeQueueDeviceCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalComputeQueueDeviceCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( uint32_t reservedExternalQueues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , reservedExternalQueues{ reservedExternalQueues_ } { } VULKAN_HPP_CONSTEXPR ExternalComputeQueueDeviceCreateInfoNV( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalComputeQueueDeviceCreateInfoNV( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalComputeQueueDeviceCreateInfoNV( *reinterpret_cast( &rhs ) ) { } ExternalComputeQueueDeviceCreateInfoNV & operator=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalComputeQueueDeviceCreateInfoNV & operator=( VkExternalComputeQueueDeviceCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV & 
setReservedExternalQueues( uint32_t reservedExternalQueues_ ) & VULKAN_HPP_NOEXCEPT { reservedExternalQueues = reservedExternalQueues_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalComputeQueueDeviceCreateInfoNV && setReservedExternalQueues( uint32_t reservedExternalQueues_ ) && VULKAN_HPP_NOEXCEPT { reservedExternalQueues = reservedExternalQueues_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalComputeQueueDeviceCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueDeviceCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalComputeQueueDeviceCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalComputeQueueDeviceCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, reservedExternalQueues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalComputeQueueDeviceCreateInfoNV const & ) const = default; #else bool operator==( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reservedExternalQueues == rhs.reservedExternalQueues ); # endif } bool operator!=( ExternalComputeQueueDeviceCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalComputeQueueDeviceCreateInfoNV; const void * pNext = {}; uint32_t reservedExternalQueues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalComputeQueueDeviceCreateInfoNV; }; #endif template <> struct CppType { using Type = ExternalComputeQueueDeviceCreateInfoNV; }; // wrapper 
struct for struct VkExternalFenceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFenceProperties.html struct ExternalFenceProperties { using NativeType = VkExternalFenceProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, ExternalFenceFeatureFlags externalFenceFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } , compatibleHandleTypes{ compatibleHandleTypes_ } , externalFenceFeatures{ externalFenceFeatures_ } { } VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFenceProperties( *reinterpret_cast( &rhs ) ) { } ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalFenceProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFenceProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalFenceProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
exportFromImportedHandleTypes, compatibleHandleTypes, externalFenceFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalFenceProperties const & ) const = default; #else bool operator==( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalFenceFeatures == rhs.externalFenceFeatures ); # endif } bool operator!=( ExternalFenceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalFenceProperties; void * pNext = {}; ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {}; ExternalFenceHandleTypeFlags compatibleHandleTypes = {}; ExternalFenceFeatureFlags externalFenceFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalFenceProperties; }; #endif template <> struct CppType { using Type = ExternalFenceProperties; }; using ExternalFencePropertiesKHR = ExternalFenceProperties; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkExternalFormatANDROID, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatANDROID.html struct ExternalFormatANDROID { using NativeType = VkExternalFormatANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalFormat{ externalFormat_ } { } VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( 
ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatANDROID( *reinterpret_cast( &rhs ) ) { } ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) & VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID && setExternalFormat( uint64_t externalFormat_ ) && VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalFormatANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFormatANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalFormatANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalFormatANDROID const & ) const = default; # else bool operator==( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); # endif } bool operator!=( ExternalFormatANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExternalFormatANDROID; void * pNext = {}; uint64_t externalFormat = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalFormatANDROID; }; # endif template <> struct CppType { using Type = ExternalFormatANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkExternalFormatOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatOHOS.html struct ExternalFormatOHOS { using NativeType = VkExternalFormatOHOS; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFormatOHOS( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalFormat{ externalFormat_ } { } VULKAN_HPP_CONSTEXPR ExternalFormatOHOS( ExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalFormatOHOS( VkExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatOHOS( *reinterpret_cast( &rhs ) ) {} ExternalFormatOHOS & operator=( ExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalFormatOHOS & operator=( VkExternalFormatOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext 
= pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS & setExternalFormat( uint64_t externalFormat_ ) & VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatOHOS && setExternalFormat( uint64_t externalFormat_ ) && VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalFormatOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFormatOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFormatOHOS const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalFormatOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalFormatOHOS const & ) const = default; # else bool operator==( ExternalFormatOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); # endif } bool operator!=( ExternalFormatOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExternalFormatOHOS; void * pNext = {}; uint64_t externalFormat = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalFormatOHOS; }; # endif template <> struct CppType { using Type = ExternalFormatOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ #if defined( VK_USE_PLATFORM_SCREEN_QNX ) // wrapper struct for struct 
VkExternalFormatQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalFormatQNX.html struct ExternalFormatQNX { using NativeType = VkExternalFormatQNX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatQNX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalFormat{ externalFormat_ } { } VULKAN_HPP_CONSTEXPR ExternalFormatQNX( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalFormatQNX( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalFormatQNX( *reinterpret_cast( &rhs ) ) {} ExternalFormatQNX & operator=( ExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalFormatQNX & operator=( VkExternalFormatQNX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX & setExternalFormat( uint64_t externalFormat_ ) & VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalFormatQNX && setExternalFormat( uint64_t externalFormat_ ) && VULKAN_HPP_NOEXCEPT { externalFormat = externalFormat_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalFormatQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalFormatQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( 
this ); } operator VkExternalFormatQNX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalFormatQNX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalFormatQNX const & ) const = default; # else bool operator==( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormat == rhs.externalFormat ); # endif } bool operator!=( ExternalFormatQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eExternalFormatQNX; void * pNext = {}; uint64_t externalFormat = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalFormatQNX; }; # endif template <> struct CppType { using Type = ExternalFormatQNX; }; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ // wrapper struct for struct VkExternalImageFormatProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatProperties.html struct ExternalImageFormatProperties { using NativeType = VkExternalImageFormatProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalMemoryProperties{ externalMemoryProperties_ } { } VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( 
ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalImageFormatProperties( *reinterpret_cast( &rhs ) ) { } ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalImageFormatProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalMemoryProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalImageFormatProperties const & ) const = default; #else bool operator==( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); # endif } bool operator!=( ExternalImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalImageFormatProperties; void * pNext = {}; ExternalMemoryProperties externalMemoryProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalImageFormatProperties; }; #endif template <> struct 
CppType { using Type = ExternalImageFormatProperties; }; using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; // wrapper struct for struct VkImageFormatProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties.html struct ImageFormatProperties { using NativeType = VkImageFormatProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageFormatProperties( Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, SampleCountFlags sampleCounts_ = {}, DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxExtent{ maxExtent_ } , maxMipLevels{ maxMipLevels_ } , maxArrayLayers{ maxArrayLayers_ } , sampleCounts{ sampleCounts_ } , maxResourceSize{ maxResourceSize_ } { } VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ImageFormatProperties( *reinterpret_cast( &rhs ) ) { } ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkImageFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageFormatProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( maxExtent, maxMipLevels, maxArrayLayers, sampleCounts, maxResourceSize ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageFormatProperties const & ) const = default; #else bool operator==( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( maxExtent == rhs.maxExtent ) && ( maxMipLevels == rhs.maxMipLevels ) && ( maxArrayLayers == rhs.maxArrayLayers ) && ( sampleCounts == rhs.sampleCounts ) && ( maxResourceSize == rhs.maxResourceSize ); # endif } bool operator!=( ImageFormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Extent3D maxExtent = {}; uint32_t maxMipLevels = {}; uint32_t maxArrayLayers = {}; SampleCountFlags sampleCounts = {}; DeviceSize maxResourceSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageFormatProperties; }; #endif // wrapper struct for struct VkExternalImageFormatPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalImageFormatPropertiesNV.html struct ExternalImageFormatPropertiesNV { using NativeType = VkExternalImageFormatPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ImageFormatProperties imageFormatProperties_ = {}, ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT : imageFormatProperties{ imageFormatProperties_ } , externalMemoryFeatures{ externalMemoryFeatures_ } , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } , compatibleHandleTypes{ compatibleHandleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) 
VULKAN_HPP_NOEXCEPT : ExternalImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) { } ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( imageFormatProperties, externalMemoryFeatures, exportFromImportedHandleTypes, compatibleHandleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalImageFormatPropertiesNV const & ) const = default; #else bool operator==( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( imageFormatProperties == rhs.imageFormatProperties ) && ( externalMemoryFeatures == rhs.externalMemoryFeatures ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ); # endif } bool operator!=( ExternalImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageFormatProperties imageFormatProperties = {}; ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {}; ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {}; ExternalMemoryHandleTypeFlagsNV 
compatibleHandleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalImageFormatPropertiesNV; }; #endif // wrapper struct for struct VkExternalMemoryAcquireUnmodifiedEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryAcquireUnmodifiedEXT.html struct ExternalMemoryAcquireUnmodifiedEXT { using NativeType = VkExternalMemoryAcquireUnmodifiedEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( Bool32 acquireUnmodifiedMemory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryAcquireUnmodifiedEXT( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalMemoryAcquireUnmodifiedEXT( *reinterpret_cast( &rhs ) ) { } ExternalMemoryAcquireUnmodifiedEXT & operator=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryAcquireUnmodifiedEXT & operator=( VkExternalMemoryAcquireUnmodifiedEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ExternalMemoryAcquireUnmodifiedEXT & setAcquireUnmodifiedMemory( Bool32 acquireUnmodifiedMemory_ ) & VULKAN_HPP_NOEXCEPT { acquireUnmodifiedMemory = acquireUnmodifiedMemory_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryAcquireUnmodifiedEXT && setAcquireUnmodifiedMemory( Bool32 acquireUnmodifiedMemory_ ) && VULKAN_HPP_NOEXCEPT { acquireUnmodifiedMemory = acquireUnmodifiedMemory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalMemoryAcquireUnmodifiedEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryAcquireUnmodifiedEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryAcquireUnmodifiedEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryAcquireUnmodifiedEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, acquireUnmodifiedMemory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryAcquireUnmodifiedEXT const & ) const = default; #else bool operator==( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( acquireUnmodifiedMemory == rhs.acquireUnmodifiedMemory ); # endif } bool operator!=( ExternalMemoryAcquireUnmodifiedEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalMemoryAcquireUnmodifiedEXT; const void * pNext = {}; Bool32 acquireUnmodifiedMemory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryAcquireUnmodifiedEXT; }; #endif template <> struct CppType { using Type = ExternalMemoryAcquireUnmodifiedEXT; }; // wrapper 
struct for struct VkExternalMemoryBufferCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryBufferCreateInfo.html struct ExternalMemoryBufferCreateInfo { using NativeType = VkExternalMemoryBufferCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalMemoryBufferCreateInfo( *reinterpret_cast( &rhs ) ) { } ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo && setHandleTypes( ExternalMemoryHandleTypeFlags 
handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalMemoryBufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryBufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryBufferCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryBufferCreateInfo const & ) const = default; #else bool operator==( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExternalMemoryBufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalMemoryBufferCreateInfo; const void * pNext = {}; ExternalMemoryHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryBufferCreateInfo; }; #endif template <> struct CppType { using Type = ExternalMemoryBufferCreateInfo; }; using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; // wrapper struct for struct VkExternalMemoryImageCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfo.html struct ExternalMemoryImageCreateInfo { using NativeType = VkExternalMemoryImageCreateInfo; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalMemoryImageCreateInfo( *reinterpret_cast( &rhs ) ) { } ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo && setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalMemoryImageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryImageCreateInfo const & ) const = default; #else bool operator==( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExternalMemoryImageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalMemoryImageCreateInfo; const void * pNext = {}; ExternalMemoryHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryImageCreateInfo; }; #endif template <> struct CppType { using Type = ExternalMemoryImageCreateInfo; }; using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; // wrapper struct for struct VkExternalMemoryImageCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryImageCreateInfoNV.html struct ExternalMemoryImageCreateInfoNV { using NativeType = VkExternalMemoryImageCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalMemoryImageCreateInfoNV( *reinterpret_cast( &rhs ) ) { } ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV && setHandleTypes( ExternalMemoryHandleTypeFlagsNV handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalMemoryImageCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryImageCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) 
std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryImageCreateInfoNV const & ) const = default; #else bool operator==( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExternalMemoryImageCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV; const void * pNext = {}; ExternalMemoryHandleTypeFlagsNV handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryImageCreateInfoNV; }; #endif template <> struct CppType { using Type = ExternalMemoryImageCreateInfoNV; }; // wrapper struct for struct VkExternalMemoryTensorCreateInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalMemoryTensorCreateInfoARM.html struct ExternalMemoryTensorCreateInfoARM { using NativeType = VkExternalMemoryTensorCreateInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryTensorCreateInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM( ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleTypes{ handleTypes_ } { } VULKAN_HPP_CONSTEXPR ExternalMemoryTensorCreateInfoARM( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalMemoryTensorCreateInfoARM( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : 
ExternalMemoryTensorCreateInfoARM( *reinterpret_cast( &rhs ) ) { } ExternalMemoryTensorCreateInfoARM & operator=( ExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalMemoryTensorCreateInfoARM & operator=( VkExternalMemoryTensorCreateInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM & setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) & VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalMemoryTensorCreateInfoARM && setHandleTypes( ExternalMemoryHandleTypeFlags handleTypes_ ) && VULKAN_HPP_NOEXCEPT { handleTypes = handleTypes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalMemoryTensorCreateInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryTensorCreateInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalMemoryTensorCreateInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalMemoryTensorCreateInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalMemoryTensorCreateInfoARM const & ) const = default; #else bool operator==( ExternalMemoryTensorCreateInfoARM 
const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleTypes == rhs.handleTypes ); # endif } bool operator!=( ExternalMemoryTensorCreateInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalMemoryTensorCreateInfoARM; const void * pNext = {}; ExternalMemoryHandleTypeFlags handleTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalMemoryTensorCreateInfoARM; }; #endif template <> struct CppType { using Type = ExternalMemoryTensorCreateInfoARM; }; // wrapper struct for struct VkExternalSemaphoreProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalSemaphoreProperties.html struct ExternalSemaphoreProperties { using NativeType = VkExternalSemaphoreProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } , compatibleHandleTypes{ compatibleHandleTypes_ } , externalSemaphoreFeatures{ externalSemaphoreFeatures_ } { } VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalSemaphoreProperties( *reinterpret_cast( &rhs ) ) { } 
ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkExternalSemaphoreProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalSemaphoreProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalSemaphoreProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, exportFromImportedHandleTypes, compatibleHandleTypes, externalSemaphoreFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalSemaphoreProperties const & ) const = default; #else bool operator==( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes ) && ( compatibleHandleTypes == rhs.compatibleHandleTypes ) && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures ); # endif } bool operator!=( ExternalSemaphoreProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalSemaphoreProperties; void * pNext = {}; ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {}; ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {}; ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
ExternalSemaphoreProperties; }; #endif template <> struct CppType { using Type = ExternalSemaphoreProperties; }; using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; // wrapper struct for struct VkExternalTensorPropertiesARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkExternalTensorPropertiesARM.html struct ExternalTensorPropertiesARM { using NativeType = VkExternalTensorPropertiesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalTensorPropertiesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM( ExternalMemoryProperties externalMemoryProperties_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalMemoryProperties{ externalMemoryProperties_ } { } VULKAN_HPP_CONSTEXPR ExternalTensorPropertiesARM( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; ExternalTensorPropertiesARM( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT : ExternalTensorPropertiesARM( *reinterpret_cast( &rhs ) ) { } ExternalTensorPropertiesARM & operator=( ExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ExternalTensorPropertiesARM & operator=( VkExternalTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM & setExternalMemoryProperties( ExternalMemoryProperties const & 
externalMemoryProperties_ ) & VULKAN_HPP_NOEXCEPT { externalMemoryProperties = externalMemoryProperties_; return *this; } VULKAN_HPP_CONSTEXPR_14 ExternalTensorPropertiesARM && setExternalMemoryProperties( ExternalMemoryProperties const & externalMemoryProperties_ ) && VULKAN_HPP_NOEXCEPT { externalMemoryProperties = externalMemoryProperties_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkExternalTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkExternalTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkExternalTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalMemoryProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ExternalTensorPropertiesARM const & ) const = default; #else bool operator==( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryProperties == rhs.externalMemoryProperties ); # endif } bool operator!=( ExternalTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eExternalTensorPropertiesARM; const void * pNext = {}; ExternalMemoryProperties externalMemoryProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ExternalTensorPropertiesARM; }; #endif template <> struct CppType { using Type = ExternalTensorPropertiesARM; }; // wrapper struct for struct VkFenceCreateInfo, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceCreateInfo.html struct FenceCreateInfo { using NativeType = VkFenceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FenceCreateInfo( *reinterpret_cast( &rhs ) ) {} FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & setFlags( FenceCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo && setFlags( FenceCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFenceCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkFenceCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FenceCreateInfo const & ) const = default; #else bool operator==( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( FenceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFenceCreateInfo; const void * pNext = {}; FenceCreateFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FenceCreateInfo; }; #endif template <> struct CppType { using Type = FenceCreateInfo; }; // wrapper struct for struct VkFenceGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetFdInfoKHR.html struct FenceGetFdInfoKHR { using NativeType = VkFenceGetFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( Fence fence_ = {}, ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fence{ fence_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setFence( Fence fence_ ) & VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR && setFence( Fence fence_ ) && VULKAN_HPP_NOEXCEPT { fence = fence_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR && setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFenceGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFenceGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fence, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FenceGetFdInfoKHR const & ) const = default; #else bool operator==( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); # endif } bool operator!=( FenceGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFenceGetFdInfoKHR; const void * pNext = {}; Fence fence = {}; ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FenceGetFdInfoKHR; }; #endif template <> struct CppType { using Type = FenceGetFdInfoKHR; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkFenceGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFenceGetWin32HandleInfoKHR.html struct FenceGetWin32HandleInfoKHR { using NativeType = VkFenceGetWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( Fence fence_ = {}, ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fence{ fence_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FenceGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setFence( Fence fence_ ) & VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR && setFence( Fence fence_ ) && VULKAN_HPP_NOEXCEPT { fence = fence_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR && setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFenceGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFenceGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFenceGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fence, handleType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FenceGetWin32HandleInfoKHR const & ) const = default; # else bool operator==( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( handleType == rhs.handleType ); # endif } bool operator!=( FenceGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR; const void * pNext = {}; Fence fence = {}; ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FenceGetWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = FenceGetWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ // wrapper struct for struct VkFilterCubicImageViewImageFormatPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFilterCubicImageViewImageFormatPropertiesEXT.html struct FilterCubicImageViewImageFormatPropertiesEXT { using NativeType = VkFilterCubicImageViewImageFormatPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( Bool32 filterCubic_ = {}, Bool32 filterCubicMinmax_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , filterCubic{ filterCubic_ } , filterCubicMinmax{ filterCubicMinmax_ } { } VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast( &rhs ) ) { } FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs 
) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkFilterCubicImageViewImageFormatPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFilterCubicImageViewImageFormatPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFilterCubicImageViewImageFormatPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, filterCubic, filterCubicMinmax ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const & ) const = default; #else bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCubic == rhs.filterCubic ) && ( filterCubicMinmax == rhs.filterCubicMinmax ); # endif } bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT; void * pNext = {}; Bool32 filterCubic = {}; Bool32 filterCubicMinmax = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FilterCubicImageViewImageFormatPropertiesEXT; }; #endif template <> struct CppType { using Type = FilterCubicImageViewImageFormatPropertiesEXT; }; // wrapper struct for struct VkFormatProperties, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties.html struct FormatProperties { using NativeType = VkFormatProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FormatProperties( FormatFeatureFlags linearTilingFeatures_ = {}, FormatFeatureFlags optimalTilingFeatures_ = {}, FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT : linearTilingFeatures{ linearTilingFeatures_ } , optimalTilingFeatures{ optimalTilingFeatures_ } , bufferFeatures{ bufferFeatures_ } { } VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties( *reinterpret_cast( &rhs ) ) {} FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkFormatProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFormatProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FormatProperties const & ) const = default; #else bool operator==( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == 
rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures ); # endif } bool operator!=( FormatProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: FormatFeatureFlags linearTilingFeatures = {}; FormatFeatureFlags optimalTilingFeatures = {}; FormatFeatureFlags bufferFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FormatProperties; }; #endif // wrapper struct for struct VkFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties2.html struct FormatProperties2 { using NativeType = VkFormatProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , formatProperties{ formatProperties_ } { } VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties2( *reinterpret_cast( &rhs ) ) {} FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFormatProperties2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) 
std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, formatProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FormatProperties2 const & ) const = default; #else bool operator==( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatProperties == rhs.formatProperties ); # endif } bool operator!=( FormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFormatProperties2; void * pNext = {}; FormatProperties formatProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FormatProperties2; }; #endif template <> struct CppType { using Type = FormatProperties2; }; using FormatProperties2KHR = FormatProperties2; // wrapper struct for struct VkFormatProperties3, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFormatProperties3.html struct FormatProperties3 { using NativeType = VkFormatProperties3; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties3; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FormatProperties3( FormatFeatureFlags2 linearTilingFeatures_ = {}, FormatFeatureFlags2 optimalTilingFeatures_ = {}, FormatFeatureFlags2 bufferFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , linearTilingFeatures{ linearTilingFeatures_ } , optimalTilingFeatures{ optimalTilingFeatures_ } , bufferFeatures{ bufferFeatures_ } { } VULKAN_HPP_CONSTEXPR FormatProperties3( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; FormatProperties3( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT : FormatProperties3( *reinterpret_cast( 
&rhs ) ) {} FormatProperties3 & operator=( FormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FormatProperties3 & operator=( VkFormatProperties3 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkFormatProperties3 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties3 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFormatProperties3 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFormatProperties3 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, linearTilingFeatures, optimalTilingFeatures, bufferFeatures ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FormatProperties3 const & ) const = default; #else bool operator==( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearTilingFeatures == rhs.linearTilingFeatures ) && ( optimalTilingFeatures == rhs.optimalTilingFeatures ) && ( bufferFeatures == rhs.bufferFeatures ); # endif } bool operator!=( FormatProperties3 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFormatProperties3; void * pNext = {}; FormatFeatureFlags2 linearTilingFeatures = {}; FormatFeatureFlags2 optimalTilingFeatures = {}; FormatFeatureFlags2 bufferFeatures = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FormatProperties3; }; #endif template <> struct CppType { using Type = FormatProperties3; }; using FormatProperties3KHR = FormatProperties3; // wrapper struct for struct VkFragmentShadingRateAttachmentInfoKHR, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkFragmentShadingRateAttachmentInfoKHR.html struct FragmentShadingRateAttachmentInfoKHR { using NativeType = VkFragmentShadingRateAttachmentInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ } , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } { } VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast( &rhs ) ) { } FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & 
setPFragmentShadingRateAttachment( const AttachmentReference2 * pFragmentShadingRateAttachment_ ) & VULKAN_HPP_NOEXCEPT { pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR && setPFragmentShadingRateAttachment( const AttachmentReference2 * pFragmentShadingRateAttachment_ ) && VULKAN_HPP_NOEXCEPT { pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( Extent2D const & shadingRateAttachmentTexelSize_ ) & VULKAN_HPP_NOEXCEPT { shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR && setShadingRateAttachmentTexelSize( Extent2D const & shadingRateAttachmentTexelSize_ ) && VULKAN_HPP_NOEXCEPT { shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFragmentShadingRateAttachmentInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFragmentShadingRateAttachmentInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFragmentShadingRateAttachmentInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pFragmentShadingRateAttachment, shadingRateAttachmentTexelSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FragmentShadingRateAttachmentInfoKHR const & ) const = default; #else bool operator==( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment ) && ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize ); # endif } bool operator!=( FragmentShadingRateAttachmentInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR; const void * pNext = {}; const AttachmentReference2 * pFragmentShadingRateAttachment = {}; Extent2D shadingRateAttachmentTexelSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FragmentShadingRateAttachmentInfoKHR; }; #endif template <> struct CppType { using Type = FragmentShadingRateAttachmentInfoKHR; }; // wrapper struct for struct VkFrameBoundaryEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryEXT.html struct FrameBoundaryEXT { using NativeType = VkFrameBoundaryEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryFlagsEXT flags_ = {}, uint64_t frameID_ = {}, uint32_t imageCount_ = {}, const Image * pImages_ = {}, uint32_t bufferCount_ = {}, const Buffer * pBuffers_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , frameID{ frameID_ } , imageCount{ imageCount_ } , pImages{ pImages_ } , bufferCount{ bufferCount_ } , pBuffers{ pBuffers_ } , tagName{ tagName_ } , tagSize{ tagSize_ } , pTag{ pTag_ } { } VULKAN_HPP_CONSTEXPR FrameBoundaryEXT( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; FrameBoundaryEXT( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryEXT( 
*reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template FrameBoundaryEXT( FrameBoundaryFlagsEXT flags_, uint64_t frameID_, ArrayProxyNoTemporaries const & images_, ArrayProxyNoTemporaries const & buffers_ = {}, uint64_t tagName_ = {}, ArrayProxyNoTemporaries const & tag_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , frameID( frameID_ ) , imageCount( static_cast( images_.size() ) ) , pImages( images_.data() ) , bufferCount( static_cast( buffers_.size() ) ) , pBuffers( buffers_.data() ) , tagName( tagName_ ) , tagSize( tag_.size() * sizeof( T ) ) , pTag( tag_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ FrameBoundaryEXT & operator=( FrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FrameBoundaryEXT & operator=( VkFrameBoundaryEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFlags( FrameBoundaryFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setFlags( FrameBoundaryFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setFrameID( uint64_t frameID_ ) & VULKAN_HPP_NOEXCEPT { frameID = frameID_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setFrameID( uint64_t frameID_ ) && VULKAN_HPP_NOEXCEPT { frameID = frameID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setImageCount( uint32_t imageCount_ ) & VULKAN_HPP_NOEXCEPT { 
imageCount = imageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setImageCount( uint32_t imageCount_ ) && VULKAN_HPP_NOEXCEPT { imageCount = imageCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPImages( const Image * pImages_ ) & VULKAN_HPP_NOEXCEPT { pImages = pImages_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setPImages( const Image * pImages_ ) && VULKAN_HPP_NOEXCEPT { pImages = pImages_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FrameBoundaryEXT & setImages( ArrayProxyNoTemporaries const & images_ ) VULKAN_HPP_NOEXCEPT { imageCount = static_cast( images_.size() ); pImages = images_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setBufferCount( uint32_t bufferCount_ ) & VULKAN_HPP_NOEXCEPT { bufferCount = bufferCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setBufferCount( uint32_t bufferCount_ ) && VULKAN_HPP_NOEXCEPT { bufferCount = bufferCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPBuffers( const Buffer * pBuffers_ ) & VULKAN_HPP_NOEXCEPT { pBuffers = pBuffers_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setPBuffers( const Buffer * pBuffers_ ) && VULKAN_HPP_NOEXCEPT { pBuffers = pBuffers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FrameBoundaryEXT & setBuffers( ArrayProxyNoTemporaries const & buffers_ ) VULKAN_HPP_NOEXCEPT { bufferCount = static_cast( buffers_.size() ); pBuffers = buffers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setTagName( uint64_t tagName_ ) & VULKAN_HPP_NOEXCEPT { tagName = tagName_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setTagName( uint64_t tagName_ ) && VULKAN_HPP_NOEXCEPT { tagName = tagName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
FrameBoundaryEXT & setTagSize( size_t tagSize_ ) & VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setTagSize( size_t tagSize_ ) && VULKAN_HPP_NOEXCEPT { tagSize = tagSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT & setPTag( const void * pTag_ ) & VULKAN_HPP_NOEXCEPT { pTag = pTag_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryEXT && setPTag( const void * pTag_ ) && VULKAN_HPP_NOEXCEPT { pTag = pTag_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template FrameBoundaryEXT & setTag( ArrayProxyNoTemporaries const & tag_ ) VULKAN_HPP_NOEXCEPT { tagSize = tag_.size() * sizeof( T ); pTag = tag_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFrameBoundaryEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFrameBoundaryEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFrameBoundaryEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFrameBoundaryEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, frameID, imageCount, pImages, bufferCount, pBuffers, tagName, tagSize, pTag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FrameBoundaryEXT const & ) const = default; #else bool operator==( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( frameID == rhs.frameID ) && ( imageCount == rhs.imageCount ) && ( pImages == rhs.pImages ) && ( bufferCount == rhs.bufferCount ) && ( pBuffers == rhs.pBuffers ) && ( tagName == rhs.tagName ) && ( tagSize == 
rhs.tagSize ) && ( pTag == rhs.pTag ); # endif } bool operator!=( FrameBoundaryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFrameBoundaryEXT; const void * pNext = {}; FrameBoundaryFlagsEXT flags = {}; uint64_t frameID = {}; uint32_t imageCount = {}; const Image * pImages = {}; uint32_t bufferCount = {}; const Buffer * pBuffers = {}; uint64_t tagName = {}; size_t tagSize = {}; const void * pTag = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FrameBoundaryEXT; }; #endif template <> struct CppType { using Type = FrameBoundaryEXT; }; // wrapper struct for struct VkFrameBoundaryTensorsARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFrameBoundaryTensorsARM.html struct FrameBoundaryTensorsARM { using NativeType = VkFrameBoundaryTensorsARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFrameBoundaryTensorsARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FrameBoundaryTensorsARM( uint32_t tensorCount_ = {}, const TensorARM * pTensors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tensorCount{ tensorCount_ } , pTensors{ pTensors_ } { } VULKAN_HPP_CONSTEXPR FrameBoundaryTensorsARM( FrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; FrameBoundaryTensorsARM( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT : FrameBoundaryTensorsARM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FrameBoundaryTensorsARM( ArrayProxyNoTemporaries const & tensors_, const void * pNext_ = nullptr ) : pNext( pNext_ ), tensorCount( static_cast( tensors_.size() ) ), pTensors( tensors_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ FrameBoundaryTensorsARM & operator=( FrameBoundaryTensorsARM const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FrameBoundaryTensorsARM & operator=( VkFrameBoundaryTensorsARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setTensorCount( uint32_t tensorCount_ ) & VULKAN_HPP_NOEXCEPT { tensorCount = tensorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM && setTensorCount( uint32_t tensorCount_ ) && VULKAN_HPP_NOEXCEPT { tensorCount = tensorCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM & setPTensors( const TensorARM * pTensors_ ) & VULKAN_HPP_NOEXCEPT { pTensors = pTensors_; return *this; } VULKAN_HPP_CONSTEXPR_14 FrameBoundaryTensorsARM && setPTensors( const TensorARM * pTensors_ ) && VULKAN_HPP_NOEXCEPT { pTensors = pTensors_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FrameBoundaryTensorsARM & setTensors( ArrayProxyNoTemporaries const & tensors_ ) VULKAN_HPP_NOEXCEPT { tensorCount = static_cast( tensors_.size() ); pTensors = tensors_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFrameBoundaryTensorsARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFrameBoundaryTensorsARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFrameBoundaryTensorsARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFrameBoundaryTensorsARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tensorCount, pTensors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FrameBoundaryTensorsARM const & ) const = default; #else bool operator==( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorCount == rhs.tensorCount ) && ( pTensors == rhs.pTensors ); # endif } bool operator!=( FrameBoundaryTensorsARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFrameBoundaryTensorsARM; const void * pNext = {}; uint32_t tensorCount = {}; const TensorARM * pTensors = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FrameBoundaryTensorsARM; }; #endif template <> struct CppType { using Type = FrameBoundaryTensorsARM; }; // wrapper struct for struct VkFramebufferAttachmentImageInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentImageInfo.html struct FramebufferAttachmentImageInfo { using NativeType = VkFramebufferAttachmentImageInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( ImageCreateFlags flags_ = {}, ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const Format * pViewFormats_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , usage{ usage_ } , width{ width_ } , height{ height_ } , layerCount{ layerCount_ } , viewFormatCount{ viewFormatCount_ } , 
pViewFormats{ pViewFormats_ } { } VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FramebufferAttachmentImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferAttachmentImageInfo( ImageCreateFlags flags_, ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, ArrayProxyNoTemporaries const & viewFormats_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , usage( usage_ ) , width( width_ ) , height( height_ ) , layerCount( layerCount_ ) , viewFormatCount( static_cast( viewFormats_.size() ) ) , pViewFormats( viewFormats_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setFlags( ImageCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setFlags( ImageCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setUsage( ImageUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { 
usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setUsage( ImageUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setWidth( uint32_t width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) & VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setHeight( uint32_t height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) & VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setLayerCount( uint32_t layerCount_ ) && VULKAN_HPP_NOEXCEPT { layerCount = layerCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) & VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setViewFormatCount( uint32_t viewFormatCount_ ) && VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & setPViewFormats( const Format * pViewFormats_ ) & VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo && setPViewFormats( const Format * pViewFormats_ ) && VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferAttachmentImageInfo & setViewFormats( ArrayProxyNoTemporaries 
const & viewFormats_ ) VULKAN_HPP_NOEXCEPT { viewFormatCount = static_cast( viewFormats_.size() ); pViewFormats = viewFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFramebufferAttachmentImageInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferAttachmentImageInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFramebufferAttachmentImageInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, usage, width, height, layerCount, viewFormatCount, pViewFormats ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FramebufferAttachmentImageInfo const & ) const = default; #else bool operator==( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( width == rhs.width ) && ( height == rhs.height ) && ( layerCount == rhs.layerCount ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); # endif } bool operator!=( FramebufferAttachmentImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFramebufferAttachmentImageInfo; const void * pNext = {}; ImageCreateFlags flags = {}; ImageUsageFlags usage = {}; uint32_t width = {}; uint32_t height = {}; uint32_t layerCount = {}; uint32_t viewFormatCount = {}; const Format * pViewFormats = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
FramebufferAttachmentImageInfo; }; #endif template <> struct CppType { using Type = FramebufferAttachmentImageInfo; }; using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo; // wrapper struct for struct VkFramebufferAttachmentsCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferAttachmentsCreateInfo.html struct FramebufferAttachmentsCreateInfo { using NativeType = VkFramebufferAttachmentsCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, const FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , attachmentImageInfoCount{ attachmentImageInfoCount_ } , pAttachmentImageInfos{ pAttachmentImageInfos_ } { } VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FramebufferAttachmentsCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferAttachmentsCreateInfo( ArrayProxyNoTemporaries const & attachmentImageInfos_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , attachmentImageInfoCount( static_cast( attachmentImageInfos_.size() ) ) , pAttachmentImageInfos( attachmentImageInfos_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) & VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = attachmentImageInfoCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo && setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) && VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = attachmentImageInfoCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) & VULKAN_HPP_NOEXCEPT { pAttachmentImageInfos = pAttachmentImageInfos_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo && setPAttachmentImageInfos( const FramebufferAttachmentImageInfo * pAttachmentImageInfos_ ) && VULKAN_HPP_NOEXCEPT { pAttachmentImageInfos = pAttachmentImageInfos_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( ArrayProxyNoTemporaries const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT { attachmentImageInfoCount = static_cast( attachmentImageInfos_.size() ); pAttachmentImageInfos = attachmentImageInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFramebufferAttachmentsCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT 
{ return *reinterpret_cast( this ); } operator VkFramebufferAttachmentsCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFramebufferAttachmentsCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, attachmentImageInfoCount, pAttachmentImageInfos ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FramebufferAttachmentsCreateInfo const & ) const = default; #else bool operator==( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount ) && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos ); # endif } bool operator!=( FramebufferAttachmentsCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo; const void * pNext = {}; uint32_t attachmentImageInfoCount = {}; const FramebufferAttachmentImageInfo * pAttachmentImageInfos = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FramebufferAttachmentsCreateInfo; }; #endif template <> struct CppType { using Type = FramebufferAttachmentsCreateInfo; }; using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo; // wrapper struct for struct VkFramebufferCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferCreateInfo.html struct FramebufferCreateInfo { using NativeType = VkFramebufferCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateFlags flags_ = {}, RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const ImageView * pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , renderPass{ renderPass_ } , attachmentCount{ attachmentCount_ } , pAttachments{ pAttachments_ } , width{ width_ } , height{ height_ } , layers{ layers_ } { } VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : FramebufferCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferCreateInfo( FramebufferCreateFlags flags_, RenderPass renderPass_, ArrayProxyNoTemporaries const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , renderPass( renderPass_ ) , attachmentCount( static_cast( attachments_.size() ) ) , pAttachments( attachments_.data() ) , width( width_ ) , height( height_ ) , layers( layers_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
FramebufferCreateInfo & setFlags( FramebufferCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setFlags( FramebufferCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setRenderPass( RenderPass renderPass_ ) & VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setRenderPass( RenderPass renderPass_ ) && VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) & VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setAttachmentCount( uint32_t attachmentCount_ ) && VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setPAttachments( const ImageView * pAttachments_ ) & VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setPAttachments( const ImageView * pAttachments_ ) && VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) FramebufferCreateInfo & setAttachments( ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT { attachmentCount = static_cast( attachments_.size() ); pAttachments = attachments_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setWidth( uint32_t width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setWidth( uint32_t width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setHeight( uint32_t height_ ) & 
VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setHeight( uint32_t height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & setLayers( uint32_t layers_ ) & VULKAN_HPP_NOEXCEPT { layers = layers_; return *this; } VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo && setLayers( uint32_t layers_ ) && VULKAN_HPP_NOEXCEPT { layers = layers_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkFramebufferCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFramebufferCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, renderPass, attachmentCount, pAttachments, width, height, layers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FramebufferCreateInfo const & ) const = default; #else bool operator==( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( renderPass == rhs.renderPass ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) && ( width == rhs.width ) && ( height == rhs.height ) && ( layers == rhs.layers ); # endif } bool operator!=( FramebufferCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFramebufferCreateInfo; const void * pNext = {}; FramebufferCreateFlags flags = {}; RenderPass renderPass = {}; 
uint32_t attachmentCount = {}; const ImageView * pAttachments = {}; uint32_t width = {}; uint32_t height = {}; uint32_t layers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FramebufferCreateInfo; }; #endif template <> struct CppType { using Type = FramebufferCreateInfo; }; // wrapper struct for struct VkFramebufferMixedSamplesCombinationNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkFramebufferMixedSamplesCombinationNV.html struct FramebufferMixedSamplesCombinationNV { using NativeType = VkFramebufferMixedSamplesCombinationNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( CoverageReductionModeNV coverageReductionMode_ = CoverageReductionModeNV::eMerge, SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, SampleCountFlags depthStencilSamples_ = {}, SampleCountFlags colorSamples_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , coverageReductionMode{ coverageReductionMode_ } , rasterizationSamples{ rasterizationSamples_ } , depthStencilSamples{ depthStencilSamples_ } , colorSamples{ colorSamples_ } { } VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT : FramebufferMixedSamplesCombinationNV( *reinterpret_cast( &rhs ) ) { } FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkFramebufferMixedSamplesCombinationNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkFramebufferMixedSamplesCombinationNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkFramebufferMixedSamplesCombinationNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, coverageReductionMode, rasterizationSamples, depthStencilSamples, colorSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( FramebufferMixedSamplesCombinationNV const & ) const = default; #else bool operator==( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ) && ( rasterizationSamples == rhs.rasterizationSamples ) && ( depthStencilSamples == rhs.depthStencilSamples ) && ( colorSamples == rhs.colorSamples ); # endif } bool operator!=( FramebufferMixedSamplesCombinationNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV; void * pNext = {}; CoverageReductionModeNV coverageReductionMode = CoverageReductionModeNV::eMerge; SampleCountFlagBits rasterizationSamples = SampleCountFlagBits::e1; SampleCountFlags depthStencilSamples = {}; SampleCountFlags colorSamples = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = FramebufferMixedSamplesCombinationNV; }; #endif template <> struct CppType { using Type = 
FramebufferMixedSamplesCombinationNV; }; // wrapper struct for struct VkGeneratedCommandsInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoEXT.html struct GeneratedCommandsInfoEXT { using NativeType = VkGeneratedCommandsInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( ShaderStageFlags shaderStages_ = {}, IndirectExecutionSetEXT indirectExecutionSet_ = {}, IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, DeviceAddress indirectAddress_ = {}, DeviceSize indirectAddressSize_ = {}, DeviceAddress preprocessAddress_ = {}, DeviceSize preprocessSize_ = {}, uint32_t maxSequenceCount_ = {}, DeviceAddress sequenceCountAddress_ = {}, uint32_t maxDrawCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderStages{ shaderStages_ } , indirectExecutionSet{ indirectExecutionSet_ } , indirectCommandsLayout{ indirectCommandsLayout_ } , indirectAddress{ indirectAddress_ } , indirectAddressSize{ indirectAddressSize_ } , preprocessAddress{ preprocessAddress_ } , preprocessSize{ preprocessSize_ } , maxSequenceCount{ maxSequenceCount_ } , sequenceCountAddress{ sequenceCountAddress_ } , maxDrawCount{ maxDrawCount_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsInfoEXT( *reinterpret_cast( &rhs ) ) { } GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return 
*this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( ShaderStageFlags shaderStages_ ) & VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setShaderStages( ShaderStageFlags shaderStages_ ) && VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) & VULKAN_HPP_NOEXCEPT { indirectExecutionSet = indirectExecutionSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) && VULKAN_HPP_NOEXCEPT { indirectExecutionSet = indirectExecutionSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) & VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) && VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( DeviceAddress indirectAddress_ ) & VULKAN_HPP_NOEXCEPT { indirectAddress = indirectAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setIndirectAddress( DeviceAddress indirectAddress_ ) && VULKAN_HPP_NOEXCEPT { indirectAddress = indirectAddress_; return std::move( 
*this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( DeviceSize indirectAddressSize_ ) & VULKAN_HPP_NOEXCEPT { indirectAddressSize = indirectAddressSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setIndirectAddressSize( DeviceSize indirectAddressSize_ ) && VULKAN_HPP_NOEXCEPT { indirectAddressSize = indirectAddressSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( DeviceAddress preprocessAddress_ ) & VULKAN_HPP_NOEXCEPT { preprocessAddress = preprocessAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setPreprocessAddress( DeviceAddress preprocessAddress_ ) && VULKAN_HPP_NOEXCEPT { preprocessAddress = preprocessAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( DeviceSize preprocessSize_ ) & VULKAN_HPP_NOEXCEPT { preprocessSize = preprocessSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setPreprocessSize( DeviceSize preprocessSize_ ) && VULKAN_HPP_NOEXCEPT { preprocessSize = preprocessSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) & VULKAN_HPP_NOEXCEPT { maxSequenceCount = maxSequenceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setMaxSequenceCount( uint32_t maxSequenceCount_ ) && VULKAN_HPP_NOEXCEPT { maxSequenceCount = maxSequenceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( DeviceAddress sequenceCountAddress_ ) & VULKAN_HPP_NOEXCEPT { sequenceCountAddress = sequenceCountAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setSequenceCountAddress( DeviceAddress sequenceCountAddress_ ) && VULKAN_HPP_NOEXCEPT { sequenceCountAddress = sequenceCountAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) & VULKAN_HPP_NOEXCEPT { maxDrawCount = maxDrawCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT && setMaxDrawCount( uint32_t maxDrawCount_ ) && VULKAN_HPP_NOEXCEPT { maxDrawCount = maxDrawCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderStages, indirectExecutionSet, indirectCommandsLayout, indirectAddress, indirectAddressSize, preprocessAddress, preprocessSize, maxSequenceCount, sequenceCountAddress, maxDrawCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default; #else bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderStages == rhs.shaderStages ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( indirectAddress == rhs.indirectAddress ) && ( indirectAddressSize == rhs.indirectAddressSize ) && ( preprocessAddress == rhs.preprocessAddress ) && ( preprocessSize == rhs.preprocessSize ) && ( maxSequenceCount == rhs.maxSequenceCount ) && ( sequenceCountAddress == rhs.sequenceCountAddress ) && ( maxDrawCount == rhs.maxDrawCount ); # endif } bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsInfoEXT; const void * pNext = {}; ShaderStageFlags shaderStages = {}; IndirectExecutionSetEXT indirectExecutionSet = {}; IndirectCommandsLayoutEXT indirectCommandsLayout = {}; DeviceAddress indirectAddress = {}; DeviceSize indirectAddressSize = {}; DeviceAddress preprocessAddress = {}; DeviceSize preprocessSize = {}; uint32_t maxSequenceCount = {}; DeviceAddress sequenceCountAddress = {}; uint32_t maxDrawCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsInfoEXT; }; #endif template <> struct CppType { using Type = GeneratedCommandsInfoEXT; }; // wrapper struct for struct VkIndirectCommandsStreamNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsStreamNV.html struct IndirectCommandsStreamNV { using NativeType = VkIndirectCommandsStreamNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( Buffer buffer_ = {}, DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT : buffer{ buffer_ } , offset{ offset_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsStreamNV( *reinterpret_cast( &rhs ) ) { } IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsStreamNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsStreamNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsStreamNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( buffer, offset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsStreamNV const & ) const = default; #else bool operator==( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( buffer == rhs.buffer ) && ( offset == rhs.offset ); # endif } bool operator!=( IndirectCommandsStreamNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Buffer buffer = {}; DeviceSize offset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsStreamNV; }; #endif // wrapper struct for struct VkGeneratedCommandsInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsInfoNV.html struct GeneratedCommandsInfoNV { using NativeType = VkGeneratedCommandsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR 
/* NOTE(review): body of GeneratedCommandsInfoNV (VK_NV_device_generated_commands): constructors
   (including the ArrayProxyNoTemporaries enhanced-mode one, whose element type argument appears
   stripped here along with static_cast<uint32_t> and reinterpret_cast<> arguments), chainable
   lvalue/rvalue setters, native-type conversions, reflect/comparison, members, and the CppType
   specializations — then the head of GeneratedCommandsMemoryRequirementsInfoEXT. Lost line
   breaks mean the mid-line '// wrapper struct' comment on the last physical line swallows the
   rest of that line; regenerate from the registry instead of editing by hand. */
StructureType structureType = StructureType::eGeneratedCommandsInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, Pipeline pipeline_ = {}, IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const IndirectCommandsStreamNV * pStreams_ = {}, uint32_t sequencesCount_ = {}, Buffer preprocessBuffer_ = {}, DeviceSize preprocessOffset_ = {}, DeviceSize preprocessSize_ = {}, Buffer sequencesCountBuffer_ = {}, DeviceSize sequencesCountOffset_ = {}, Buffer sequencesIndexBuffer_ = {}, DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineBindPoint{ pipelineBindPoint_ } , pipeline{ pipeline_ } , indirectCommandsLayout{ indirectCommandsLayout_ } , streamCount{ streamCount_ } , pStreams{ pStreams_ } , sequencesCount{ sequencesCount_ } , preprocessBuffer{ preprocessBuffer_ } , preprocessOffset{ preprocessOffset_ } , preprocessSize{ preprocessSize_ } , sequencesCountBuffer{ sequencesCountBuffer_ } , sequencesCountOffset{ sequencesCountOffset_ } , sequencesIndexBuffer{ sequencesIndexBuffer_ } , sequencesIndexOffset{ sequencesIndexOffset_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GeneratedCommandsInfoNV( PipelineBindPoint pipelineBindPoint_, Pipeline pipeline_, IndirectCommandsLayoutNV indirectCommandsLayout_, ArrayProxyNoTemporaries const & streams_, uint32_t sequencesCount_ = {}, Buffer preprocessBuffer_ = {}, DeviceSize preprocessOffset_ = {}, DeviceSize preprocessSize_ = {}, Buffer sequencesCountBuffer_ = {}, DeviceSize sequencesCountOffset_ = {}, Buffer
sequencesIndexBuffer_ = {}, DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , pipelineBindPoint( pipelineBindPoint_ ) , pipeline( pipeline_ ) , indirectCommandsLayout( indirectCommandsLayout_ ) , streamCount( static_cast( streams_.size() ) ) , pStreams( streams_.data() ) , sequencesCount( sequencesCount_ ) , preprocessBuffer( preprocessBuffer_ ) , preprocessOffset( preprocessOffset_ ) , preprocessSize( preprocessSize_ ) , sequencesCountBuffer( sequencesCountBuffer_ ) , sequencesCountOffset( sequencesCountOffset_ ) , sequencesIndexBuffer( sequencesIndexBuffer_ ) , sequencesIndexOffset( sequencesIndexOffset_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) & VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) && VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPipeline( Pipeline pipeline_ ) & VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPipeline( Pipeline pipeline_
) && VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) & VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) && VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) & VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setStreamCount( uint32_t streamCount_ ) && VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPStreams( const IndirectCommandsStreamNV * pStreams_ ) & VULKAN_HPP_NOEXCEPT { pStreams = pStreams_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPStreams( const IndirectCommandsStreamNV * pStreams_ ) && VULKAN_HPP_NOEXCEPT { pStreams = pStreams_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GeneratedCommandsInfoNV & setStreams( ArrayProxyNoTemporaries const & streams_ ) VULKAN_HPP_NOEXCEPT { streamCount = static_cast( streams_.size() ); pStreams = streams_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) & VULKAN_HPP_NOEXCEPT { sequencesCount = sequencesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setSequencesCount( uint32_t sequencesCount_ ) && VULKAN_HPP_NOEXCEPT { sequencesCount = sequencesCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessBuffer( Buffer preprocessBuffer_ ) & VULKAN_HPP_NOEXCEPT { preprocessBuffer =
preprocessBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPreprocessBuffer( Buffer preprocessBuffer_ ) && VULKAN_HPP_NOEXCEPT { preprocessBuffer = preprocessBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessOffset( DeviceSize preprocessOffset_ ) & VULKAN_HPP_NOEXCEPT { preprocessOffset = preprocessOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPreprocessOffset( DeviceSize preprocessOffset_ ) && VULKAN_HPP_NOEXCEPT { preprocessOffset = preprocessOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setPreprocessSize( DeviceSize preprocessSize_ ) & VULKAN_HPP_NOEXCEPT { preprocessSize = preprocessSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setPreprocessSize( DeviceSize preprocessSize_ ) && VULKAN_HPP_NOEXCEPT { preprocessSize = preprocessSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountBuffer( Buffer sequencesCountBuffer_ ) & VULKAN_HPP_NOEXCEPT { sequencesCountBuffer = sequencesCountBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setSequencesCountBuffer( Buffer sequencesCountBuffer_ ) && VULKAN_HPP_NOEXCEPT { sequencesCountBuffer = sequencesCountBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesCountOffset( DeviceSize sequencesCountOffset_ ) & VULKAN_HPP_NOEXCEPT { sequencesCountOffset = sequencesCountOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setSequencesCountOffset( DeviceSize sequencesCountOffset_ ) && VULKAN_HPP_NOEXCEPT { sequencesCountOffset = sequencesCountOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ ) & VULKAN_HPP_NOEXCEPT { sequencesIndexBuffer = sequencesIndexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14
GeneratedCommandsInfoNV && setSequencesIndexBuffer( Buffer sequencesIndexBuffer_ ) && VULKAN_HPP_NOEXCEPT { sequencesIndexBuffer = sequencesIndexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ ) & VULKAN_HPP_NOEXCEPT { sequencesIndexOffset = sequencesIndexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV && setSequencesIndexOffset( DeviceSize sequencesIndexOffset_ ) && VULKAN_HPP_NOEXCEPT { sequencesIndexOffset = sequencesIndexOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, streamCount, pStreams, sequencesCount, preprocessBuffer, preprocessOffset, preprocessSize, sequencesCountBuffer, sequencesCountOffset, sequencesIndexBuffer, sequencesIndexOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeneratedCommandsInfoNV const & ) const = default; #else bool operator==( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( streamCount == rhs.streamCount ) && ( pStreams == rhs.pStreams ) && ( sequencesCount == rhs.sequencesCount )
&& ( preprocessBuffer == rhs.preprocessBuffer ) && ( preprocessOffset == rhs.preprocessOffset ) && ( preprocessSize == rhs.preprocessSize ) && ( sequencesCountBuffer == rhs.sequencesCountBuffer ) && ( sequencesCountOffset == rhs.sequencesCountOffset ) && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer ) && ( sequencesIndexOffset == rhs.sequencesIndexOffset ); # endif } bool operator!=( GeneratedCommandsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsInfoNV; const void * pNext = {}; PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics; Pipeline pipeline = {}; IndirectCommandsLayoutNV indirectCommandsLayout = {}; uint32_t streamCount = {}; const IndirectCommandsStreamNV * pStreams = {}; uint32_t sequencesCount = {}; Buffer preprocessBuffer = {}; DeviceSize preprocessOffset = {}; DeviceSize preprocessSize = {}; Buffer sequencesCountBuffer = {}; DeviceSize sequencesCountOffset = {}; Buffer sequencesIndexBuffer = {}; DeviceSize sequencesIndexOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsInfoNV; }; #endif template <> struct CppType { using Type = GeneratedCommandsInfoNV; }; // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoEXT.html struct GeneratedCommandsMemoryRequirementsInfoEXT { using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( IndirectExecutionSetEXT indirectExecutionSet_ = {}, IndirectCommandsLayoutEXT indirectCommandsLayout_
/* NOTE(review): body of GeneratedCommandsMemoryRequirementsInfoEXT (constructor tail, setters,
   conversions, reflect/comparison, members, CppType specializations) followed by the head of
   GeneratedCommandsMemoryRequirementsInfoNV. As elsewhere in this copy, '<...>' template
   arguments look stripped and original line breaks are lost (the mid-line '// wrapper struct'
   comment on the last physical line swallows the rest of that line) — regenerate from the
   registry; do not hand-edit this generated header. */
= {}, uint32_t maxSequenceCount_ = {}, uint32_t maxDrawCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indirectExecutionSet{ indirectExecutionSet_ } , indirectCommandsLayout{ indirectCommandsLayout_ } , maxSequenceCount{ maxSequenceCount_ } , maxDrawCount{ maxDrawCount_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast( &rhs ) ) { } GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) & VULKAN_HPP_NOEXCEPT { indirectExecutionSet = indirectExecutionSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT && setIndirectExecutionSet( IndirectExecutionSetEXT indirectExecutionSet_ ) && VULKAN_HPP_NOEXCEPT { indirectExecutionSet = indirectExecutionSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT &
setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) & VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT && setIndirectCommandsLayout( IndirectCommandsLayoutEXT indirectCommandsLayout_ ) && VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) & VULKAN_HPP_NOEXCEPT { maxSequenceCount = maxSequenceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT && setMaxSequenceCount( uint32_t maxSequenceCount_ ) && VULKAN_HPP_NOEXCEPT { maxSequenceCount = maxSequenceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) & VULKAN_HPP_NOEXCEPT { maxDrawCount = maxDrawCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT && setMaxDrawCount( uint32_t maxDrawCount_ ) && VULKAN_HPP_NOEXCEPT { maxDrawCount = maxDrawCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto
operator<=>( GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default; #else bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequenceCount == rhs.maxSequenceCount ) && ( maxDrawCount == rhs.maxDrawCount ); # endif } bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; const void * pNext = {}; IndirectExecutionSetEXT indirectExecutionSet = {}; IndirectCommandsLayoutEXT indirectCommandsLayout = {}; uint32_t maxSequenceCount = {}; uint32_t maxDrawCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsMemoryRequirementsInfoEXT; }; #endif template <> struct CppType { using Type = GeneratedCommandsMemoryRequirementsInfoEXT; }; // wrapper struct for struct VkGeneratedCommandsMemoryRequirementsInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsMemoryRequirementsInfoNV.html struct GeneratedCommandsMemoryRequirementsInfoNV { using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, Pipeline pipeline_ = {}, IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {},
/* NOTE(review): body of GeneratedCommandsMemoryRequirementsInfoNV (constructor tail, setters,
   conversions, reflect/comparison, members, CppType specializations) followed by the head of
   GeneratedCommandsPipelineInfoEXT. Same extraction damage as the rest of this chunk:
   '<...>' template arguments stripped, line breaks lost (mid-line '// wrapper struct' comment
   swallows the rest of its physical line) — restore from the generator output, not by hand. */
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineBindPoint{ pipelineBindPoint_ } , pipeline{ pipeline_ } , indirectCommandsLayout{ indirectCommandsLayout_ } , maxSequencesCount{ maxSequencesCount_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast( &rhs ) ) { } GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) & VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV && setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) && VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( Pipeline pipeline_ ) & VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14
GeneratedCommandsMemoryRequirementsInfoNV && setPipeline( Pipeline pipeline_ ) && VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) & VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV && setIndirectCommandsLayout( IndirectCommandsLayoutNV indirectCommandsLayout_ ) && VULKAN_HPP_NOEXCEPT { indirectCommandsLayout = indirectCommandsLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) & VULKAN_HPP_NOEXCEPT { maxSequencesCount = maxSequencesCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV && setMaxSequencesCount( uint32_t maxSequencesCount_ ) && VULKAN_HPP_NOEXCEPT { maxSequencesCount = maxSequencesCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsMemoryRequirementsInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsMemoryRequirementsInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineBindPoint, pipeline, indirectCommandsLayout, maxSequencesCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const & ) const = default; #else bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( pipeline == rhs.pipeline ) && ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequencesCount == rhs.maxSequencesCount ); # endif } bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV; const void * pNext = {}; PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics; Pipeline pipeline = {}; IndirectCommandsLayoutNV indirectCommandsLayout = {}; uint32_t maxSequencesCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsMemoryRequirementsInfoNV; }; #endif template <> struct CppType { using Type = GeneratedCommandsMemoryRequirementsInfoNV; }; // wrapper struct for struct VkGeneratedCommandsPipelineInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsPipelineInfoEXT.html struct GeneratedCommandsPipelineInfoEXT { using NativeType = VkGeneratedCommandsPipelineInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipeline{ pipeline_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsPipelineInfoEXT( *reinterpret_cast(
/* NOTE(review): body of GeneratedCommandsPipelineInfoEXT (note: pNext is non-const 'void *'
   here, unlike the surrounding info structs — consistent with the members listed below) plus
   the head of GeneratedCommandsShaderInfoEXT. '<...>' template arguments appear stripped and
   line breaks lost in this copy (mid-line '// wrapper struct' comment swallows the rest of
   its physical line) — regenerate rather than hand-patch. */
&rhs ) ) { } GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( Pipeline pipeline_ ) & VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT && setPipeline( Pipeline pipeline_ ) && VULKAN_HPP_NOEXCEPT { pipeline = pipeline_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipeline ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default; #else bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType ==
rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); # endif } bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT; void * pNext = {}; Pipeline pipeline = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsPipelineInfoEXT; }; #endif template <> struct CppType { using Type = GeneratedCommandsPipelineInfoEXT; }; // wrapper struct for struct VkGeneratedCommandsShaderInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGeneratedCommandsShaderInfoEXT.html struct GeneratedCommandsShaderInfoEXT { using NativeType = VkGeneratedCommandsShaderInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( uint32_t shaderCount_ = {}, const ShaderEXT * pShaders_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderCount{ shaderCount_ } , pShaders{ pShaders_ } { } VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : GeneratedCommandsShaderInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GeneratedCommandsShaderInfoEXT( ArrayProxyNoTemporaries const & shaders_, void * pNext_ = nullptr ) : pNext( pNext_ ), shaderCount( static_cast( shaders_.size() ) ), pShaders( shaders_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif
/* NOTE(review): body of GeneratedCommandsShaderInfoEXT (setters including the enhanced-mode
   setShaders taking an ArrayProxyNoTemporaries whose element type argument appears stripped,
   conversions, reflect/comparison, members, CppType specializations) plus the head of
   LatencyTimingsFrameReportNV (VK_NV_low_latency2). Same extraction damage as the rest of the
   chunk; mid-line '// wrapper struct' comment swallows the rest of its physical line. */
/*VULKAN_HPP_NO_CONSTRUCTORS*/ GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) & VULKAN_HPP_NOEXCEPT { shaderCount = shaderCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT && setShaderCount( uint32_t shaderCount_ ) && VULKAN_HPP_NOEXCEPT { shaderCount = shaderCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const ShaderEXT * pShaders_ ) & VULKAN_HPP_NOEXCEPT { pShaders = pShaders_; return *this; } VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT && setPShaders( const ShaderEXT * pShaders_ ) && VULKAN_HPP_NOEXCEPT { pShaders = pShaders_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GeneratedCommandsShaderInfoEXT & setShaders( ArrayProxyNoTemporaries const & shaders_ ) VULKAN_HPP_NOEXCEPT { shaderCount = static_cast( shaders_.size() ); pShaders = shaders_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGeneratedCommandsShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGeneratedCommandsShaderInfoEXT *() VULKAN_HPP_NOEXCEPT { return
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderCount, pShaders ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default; #else bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pShaders == rhs.pShaders ); # endif } bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT; void * pNext = {}; uint32_t shaderCount = {}; const ShaderEXT * pShaders = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GeneratedCommandsShaderInfoEXT; }; #endif template <> struct CppType { using Type = GeneratedCommandsShaderInfoEXT; }; // wrapper struct for struct VkLatencyTimingsFrameReportNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencyTimingsFrameReportNV.html struct LatencyTimingsFrameReportNV { using NativeType = VkLatencyTimingsFrameReportNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencyTimingsFrameReportNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( uint64_t presentID_ = {}, uint64_t inputSampleTimeUs_ = {}, uint64_t simStartTimeUs_ = {}, uint64_t simEndTimeUs_ = {}, uint64_t renderSubmitStartTimeUs_ = {}, uint64_t renderSubmitEndTimeUs_ = {}, uint64_t presentStartTimeUs_ = {}, uint64_t presentEndTimeUs_ = {}, uint64_t driverStartTimeUs_ = {}, uint64_t driverEndTimeUs_ = {}, uint64_t
/* NOTE(review): body of LatencyTimingsFrameReportNV (read-only report struct — no setters are
   generated for it, only constructors, conversions, reflect/comparison and the uint64_t
   microsecond timestamp members), its CppType specializations, and the head of
   GetLatencyMarkerInfoNV, which continues past this chunk. Same extraction damage throughout:
   '<...>' template arguments stripped and line breaks lost — regenerate from the Vulkan XML
   registry rather than hand-editing. */
osRenderQueueStartTimeUs_ = {}, uint64_t osRenderQueueEndTimeUs_ = {}, uint64_t gpuRenderStartTimeUs_ = {}, uint64_t gpuRenderEndTimeUs_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentID{ presentID_ } , inputSampleTimeUs{ inputSampleTimeUs_ } , simStartTimeUs{ simStartTimeUs_ } , simEndTimeUs{ simEndTimeUs_ } , renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ } , renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ } , presentStartTimeUs{ presentStartTimeUs_ } , presentEndTimeUs{ presentEndTimeUs_ } , driverStartTimeUs{ driverStartTimeUs_ } , driverEndTimeUs{ driverEndTimeUs_ } , osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ } , osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ } , gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ } , gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ } { } VULKAN_HPP_CONSTEXPR LatencyTimingsFrameReportNV( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; LatencyTimingsFrameReportNV( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencyTimingsFrameReportNV( *reinterpret_cast( &rhs ) ) { } LatencyTimingsFrameReportNV & operator=( LatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LatencyTimingsFrameReportNV & operator=( VkLatencyTimingsFrameReportNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkLatencyTimingsFrameReportNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencyTimingsFrameReportNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencyTimingsFrameReportNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLatencyTimingsFrameReportNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentID, inputSampleTimeUs, simStartTimeUs,
simEndTimeUs, renderSubmitStartTimeUs, renderSubmitEndTimeUs, presentStartTimeUs, presentEndTimeUs, driverStartTimeUs, driverEndTimeUs, osRenderQueueStartTimeUs, osRenderQueueEndTimeUs, gpuRenderStartTimeUs, gpuRenderEndTimeUs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LatencyTimingsFrameReportNV const & ) const = default; #else bool operator==( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ) && ( inputSampleTimeUs == rhs.inputSampleTimeUs ) && ( simStartTimeUs == rhs.simStartTimeUs ) && ( simEndTimeUs == rhs.simEndTimeUs ) && ( renderSubmitStartTimeUs == rhs.renderSubmitStartTimeUs ) && ( renderSubmitEndTimeUs == rhs.renderSubmitEndTimeUs ) && ( presentStartTimeUs == rhs.presentStartTimeUs ) && ( presentEndTimeUs == rhs.presentEndTimeUs ) && ( driverStartTimeUs == rhs.driverStartTimeUs ) && ( driverEndTimeUs == rhs.driverEndTimeUs ) && ( osRenderQueueStartTimeUs == rhs.osRenderQueueStartTimeUs ) && ( osRenderQueueEndTimeUs == rhs.osRenderQueueEndTimeUs ) && ( gpuRenderStartTimeUs == rhs.gpuRenderStartTimeUs ) && ( gpuRenderEndTimeUs == rhs.gpuRenderEndTimeUs ); # endif } bool operator!=( LatencyTimingsFrameReportNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLatencyTimingsFrameReportNV; void * pNext = {}; uint64_t presentID = {}; uint64_t inputSampleTimeUs = {}; uint64_t simStartTimeUs = {}; uint64_t simEndTimeUs = {}; uint64_t renderSubmitStartTimeUs = {}; uint64_t renderSubmitEndTimeUs = {}; uint64_t presentStartTimeUs = {}; uint64_t presentEndTimeUs = {}; uint64_t driverStartTimeUs = {}; uint64_t driverEndTimeUs = {}; uint64_t osRenderQueueStartTimeUs = {}; uint64_t osRenderQueueEndTimeUs = {}; uint64_t gpuRenderStartTimeUs = {}; uint64_t
gpuRenderEndTimeUs = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LatencyTimingsFrameReportNV; }; #endif template <> struct CppType { using Type = LatencyTimingsFrameReportNV; }; // wrapper struct for struct VkGetLatencyMarkerInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGetLatencyMarkerInfoNV.html struct GetLatencyMarkerInfoNV { using NativeType = VkGetLatencyMarkerInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGetLatencyMarkerInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, LatencyTimingsFrameReportNV * pTimings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , timingCount{ timingCount_ } , pTimings{ pTimings_ } { } VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GetLatencyMarkerInfoNV( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : GetLatencyMarkerInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GetLatencyMarkerInfoNV( ArrayProxyNoTemporaries const & timings_, const void * pNext_ = nullptr ) : pNext( pNext_ ), timingCount( static_cast( timings_.size() ) ), pTimings( timings_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ GetLatencyMarkerInfoNV & operator=( GetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GetLatencyMarkerInfoNV & operator=( VkGetLatencyMarkerInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setTimingCount( uint32_t timingCount_ ) & VULKAN_HPP_NOEXCEPT { timingCount = timingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV && setTimingCount( uint32_t timingCount_ ) && VULKAN_HPP_NOEXCEPT { timingCount = timingCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV & setPTimings( LatencyTimingsFrameReportNV * pTimings_ ) & VULKAN_HPP_NOEXCEPT { pTimings = pTimings_; return *this; } VULKAN_HPP_CONSTEXPR_14 GetLatencyMarkerInfoNV && setPTimings( LatencyTimingsFrameReportNV * pTimings_ ) && VULKAN_HPP_NOEXCEPT { pTimings = pTimings_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GetLatencyMarkerInfoNV & setTimings( ArrayProxyNoTemporaries const & timings_ ) VULKAN_HPP_NOEXCEPT { timingCount = static_cast( timings_.size() ); pTimings = timings_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGetLatencyMarkerInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGetLatencyMarkerInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGetLatencyMarkerInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGetLatencyMarkerInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, timingCount, pTimings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GetLatencyMarkerInfoNV const & ) const = default; #else bool operator==( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingCount == rhs.timingCount ) && ( pTimings == rhs.pTimings ); # endif } bool operator!=( GetLatencyMarkerInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGetLatencyMarkerInfoNV; const void * pNext = {}; uint32_t timingCount = {}; LatencyTimingsFrameReportNV * pTimings = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GetLatencyMarkerInfoNV; }; #endif template <> struct CppType { using Type = GetLatencyMarkerInfoNV; }; // wrapper struct for struct VkVertexInputBindingDescription, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputBindingDescription.html struct VertexInputBindingDescription { using NativeType = VkVertexInputBindingDescription; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {}, uint32_t stride_ = {}, VertexInputRate inputRate_ = VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT : binding{ binding_ } , stride{ stride_ } , inputRate{ inputRate_ } { } VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT : VertexInputBindingDescription( *reinterpret_cast( &rhs ) ) { } VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setBinding( uint32_t binding_ ) & VULKAN_HPP_NOEXCEPT { 
binding = binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription && setBinding( uint32_t binding_ ) && VULKAN_HPP_NOEXCEPT { binding = binding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setStride( uint32_t stride_ ) & VULKAN_HPP_NOEXCEPT { stride = stride_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription && setStride( uint32_t stride_ ) && VULKAN_HPP_NOEXCEPT { stride = stride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & setInputRate( VertexInputRate inputRate_ ) & VULKAN_HPP_NOEXCEPT { inputRate = inputRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription && setInputRate( VertexInputRate inputRate_ ) && VULKAN_HPP_NOEXCEPT { inputRate = inputRate_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVertexInputBindingDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkVertexInputBindingDescription const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkVertexInputBindingDescription *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( binding, stride, inputRate ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VertexInputBindingDescription const & ) const = default; #else bool operator==( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( binding == rhs.binding ) && ( stride == rhs.stride ) && ( inputRate == rhs.inputRate ); # endif } bool operator!=( VertexInputBindingDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: 
uint32_t binding = {}; uint32_t stride = {}; VertexInputRate inputRate = VertexInputRate::eVertex; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = VertexInputBindingDescription; }; #endif // wrapper struct for struct VkVertexInputAttributeDescription, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkVertexInputAttributeDescription.html struct VertexInputAttributeDescription { using NativeType = VkVertexInputAttributeDescription; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {}, uint32_t binding_ = {}, Format format_ = Format::eUndefined, uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT : location{ location_ } , binding{ binding_ } , format{ format_ } , offset{ offset_ } { } VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT : VertexInputAttributeDescription( *reinterpret_cast( &rhs ) ) { } VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setLocation( uint32_t location_ ) & VULKAN_HPP_NOEXCEPT { location = location_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription && setLocation( uint32_t location_ ) && VULKAN_HPP_NOEXCEPT { location = location_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setBinding( uint32_t binding_ ) & VULKAN_HPP_NOEXCEPT { binding 
= binding_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription && setBinding( uint32_t binding_ ) && VULKAN_HPP_NOEXCEPT { binding = binding_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkVertexInputAttributeDescription const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkVertexInputAttributeDescription const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkVertexInputAttributeDescription *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( location, binding, format, offset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( VertexInputAttributeDescription const & ) const = default; #else bool operator==( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( location == rhs.location ) && ( binding == rhs.binding ) && ( format == rhs.format ) && ( offset == rhs.offset ); # endif } bool operator!=( VertexInputAttributeDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif 
public: uint32_t location = {}; uint32_t binding = {}; Format format = Format::eUndefined; uint32_t offset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = VertexInputAttributeDescription; }; #endif // wrapper struct for struct VkPipelineVertexInputStateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineVertexInputStateCreateInfo.html struct PipelineVertexInputStateCreateInfo { using NativeType = VkPipelineVertexInputStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VertexInputBindingDescription * pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ } , pVertexBindingDescriptions{ pVertexBindingDescriptions_ } , vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ } , pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ } { } VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineVertexInputStateCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateFlags flags_, ArrayProxyNoTemporaries const & vertexBindingDescriptions_, ArrayProxyNoTemporaries const & 
vertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , vertexBindingDescriptionCount( static_cast( vertexBindingDescriptions_.size() ) ) , pVertexBindingDescriptions( vertexBindingDescriptions_.data() ) , vertexAttributeDescriptionCount( static_cast( vertexAttributeDescriptions_.size() ) ) , pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setFlags( PipelineVertexInputStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setFlags( PipelineVertexInputStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) & VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = vertexBindingDescriptionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) && VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = 
vertexBindingDescriptionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VertexInputBindingDescription * pVertexBindingDescriptions_ ) & VULKAN_HPP_NOEXCEPT { pVertexBindingDescriptions = pVertexBindingDescriptions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setPVertexBindingDescriptions( const VertexInputBindingDescription * pVertexBindingDescriptions_ ) && VULKAN_HPP_NOEXCEPT { pVertexBindingDescriptions = pVertexBindingDescriptions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( ArrayProxyNoTemporaries const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDescriptionCount = static_cast( vertexBindingDescriptions_.size() ); pVertexBindingDescriptions = vertexBindingDescriptions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) & VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) && VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) & VULKAN_HPP_NOEXCEPT { pVertexAttributeDescriptions = pVertexAttributeDescriptions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo && setPVertexAttributeDescriptions( const VertexInputAttributeDescription * pVertexAttributeDescriptions_ ) && VULKAN_HPP_NOEXCEPT { 
pVertexAttributeDescriptions = pVertexAttributeDescriptions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( ArrayProxyNoTemporaries const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeDescriptionCount = static_cast( vertexAttributeDescriptions_.size() ); pVertexAttributeDescriptions = vertexAttributeDescriptions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineVertexInputStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineVertexInputStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineVertexInputStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineVertexInputStateCreateInfo const & ) const = default; #else bool operator==( PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount ) && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions ) && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount ) && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions ); # endif } bool operator!=( 
PipelineVertexInputStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo; const void * pNext = {}; PipelineVertexInputStateCreateFlags flags = {}; uint32_t vertexBindingDescriptionCount = {}; const VertexInputBindingDescription * pVertexBindingDescriptions = {}; uint32_t vertexAttributeDescriptionCount = {}; const VertexInputAttributeDescription * pVertexAttributeDescriptions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineVertexInputStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineVertexInputStateCreateInfo; }; // wrapper struct for struct VkPipelineInputAssemblyStateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineInputAssemblyStateCreateInfo.html struct PipelineInputAssemblyStateCreateInfo { using NativeType = VkPipelineInputAssemblyStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateFlags flags_ = {}, PrimitiveTopology topology_ = PrimitiveTopology::ePointList, Bool32 primitiveRestartEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , topology{ topology_ } , primitiveRestartEnable{ primitiveRestartEnable_ } { } VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineInputAssemblyStateCreateInfo & operator=( 
PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setFlags( PipelineInputAssemblyStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo && setFlags( PipelineInputAssemblyStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setTopology( PrimitiveTopology topology_ ) & VULKAN_HPP_NOEXCEPT { topology = topology_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo && setTopology( PrimitiveTopology topology_ ) && VULKAN_HPP_NOEXCEPT { topology = topology_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ ) & VULKAN_HPP_NOEXCEPT { primitiveRestartEnable = primitiveRestartEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo && setPrimitiveRestartEnable( Bool32 primitiveRestartEnable_ ) && VULKAN_HPP_NOEXCEPT { primitiveRestartEnable = primitiveRestartEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineInputAssemblyStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( 
this ); } operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineInputAssemblyStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineInputAssemblyStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, topology, primitiveRestartEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineInputAssemblyStateCreateInfo const & ) const = default; #else bool operator==( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( topology == rhs.topology ) && ( primitiveRestartEnable == rhs.primitiveRestartEnable ); # endif } bool operator!=( PipelineInputAssemblyStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo; const void * pNext = {}; PipelineInputAssemblyStateCreateFlags flags = {}; PrimitiveTopology topology = PrimitiveTopology::ePointList; Bool32 primitiveRestartEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineInputAssemblyStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineInputAssemblyStateCreateInfo; }; // wrapper struct for struct VkPipelineTessellationStateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineTessellationStateCreateInfo.html struct PipelineTessellationStateCreateInfo { using NativeType = VkPipelineTessellationStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePipelineTessellationStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , patchControlPoints{ patchControlPoints_ } { } VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineTessellationStateCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setFlags( PipelineTessellationStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo && setFlags( PipelineTessellationStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) & VULKAN_HPP_NOEXCEPT 
{ patchControlPoints = patchControlPoints_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo && setPatchControlPoints( uint32_t patchControlPoints_ ) && VULKAN_HPP_NOEXCEPT { patchControlPoints = patchControlPoints_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineTessellationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineTessellationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineTessellationStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, patchControlPoints ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineTessellationStateCreateInfo const & ) const = default; #else bool operator==( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( patchControlPoints == rhs.patchControlPoints ); # endif } bool operator!=( PipelineTessellationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineTessellationStateCreateInfo; const void * pNext = {}; PipelineTessellationStateCreateFlags flags = {}; uint32_t patchControlPoints = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineTessellationStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineTessellationStateCreateInfo; }; // wrapper struct for struct VkPipelineViewportStateCreateInfo, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineViewportStateCreateInfo.html struct PipelineViewportStateCreateInfo { using NativeType = VkPipelineViewportStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const Viewport * pViewports_ = {}, uint32_t scissorCount_ = {}, const Rect2D * pScissors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , viewportCount{ viewportCount_ } , pViewports{ pViewports_ } , scissorCount{ scissorCount_ } , pScissors{ pScissors_ } { } VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineViewportStateCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineViewportStateCreateInfo( PipelineViewportStateCreateFlags flags_, ArrayProxyNoTemporaries const & viewports_, ArrayProxyNoTemporaries const & scissors_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , viewportCount( static_cast( viewports_.size() ) ) , pViewports( viewports_.data() ) , scissorCount( static_cast( scissors_.size() ) ) , pScissors( scissors_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return 
*this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setFlags( PipelineViewportStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && setFlags( PipelineViewportStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) & VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && setViewportCount( uint32_t viewportCount_ ) && VULKAN_HPP_NOEXCEPT { viewportCount = viewportCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPViewports( const Viewport * pViewports_ ) & VULKAN_HPP_NOEXCEPT { pViewports = pViewports_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && setPViewports( const Viewport * pViewports_ ) && VULKAN_HPP_NOEXCEPT { pViewports = pViewports_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineViewportStateCreateInfo & setViewports( ArrayProxyNoTemporaries const & viewports_ ) VULKAN_HPP_NOEXCEPT { viewportCount = static_cast( viewports_.size() ); pViewports = viewports_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) & VULKAN_HPP_NOEXCEPT { scissorCount = scissorCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && 
setScissorCount( uint32_t scissorCount_ ) && VULKAN_HPP_NOEXCEPT { scissorCount = scissorCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & setPScissors( const Rect2D * pScissors_ ) & VULKAN_HPP_NOEXCEPT { pScissors = pScissors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo && setPScissors( const Rect2D * pScissors_ ) && VULKAN_HPP_NOEXCEPT { pScissors = pScissors_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PipelineViewportStateCreateInfo & setScissors( ArrayProxyNoTemporaries const & scissors_ ) VULKAN_HPP_NOEXCEPT { scissorCount = static_cast( scissors_.size() ); pScissors = scissors_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineViewportStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineViewportStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineViewportStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, viewportCount, pViewports, scissorCount, pScissors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineViewportStateCreateInfo const & ) const = default; #else bool operator==( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( viewportCount == rhs.viewportCount ) && ( pViewports == rhs.pViewports ) && ( scissorCount == rhs.scissorCount ) && ( pScissors == rhs.pScissors ); # endif } bool 
operator!=( PipelineViewportStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineViewportStateCreateInfo; const void * pNext = {}; PipelineViewportStateCreateFlags flags = {}; uint32_t viewportCount = {}; const Viewport * pViewports = {}; uint32_t scissorCount = {}; const Rect2D * pScissors = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineViewportStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineViewportStateCreateInfo; }; // wrapper struct for struct VkPipelineRasterizationStateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineRasterizationStateCreateInfo.html struct PipelineRasterizationStateCreateInfo { using NativeType = VkPipelineRasterizationStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateFlags flags_ = {}, Bool32 depthClampEnable_ = {}, Bool32 rasterizerDiscardEnable_ = {}, PolygonMode polygonMode_ = PolygonMode::eFill, CullModeFlags cullMode_ = {}, FrontFace frontFace_ = FrontFace::eCounterClockwise, Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , depthClampEnable{ depthClampEnable_ } , rasterizerDiscardEnable{ rasterizerDiscardEnable_ } , polygonMode{ polygonMode_ } , cullMode{ cullMode_ } , frontFace{ frontFace_ } , depthBiasEnable{ depthBiasEnable_ } , depthBiasConstantFactor{ depthBiasConstantFactor_ } , depthBiasClamp{ depthBiasClamp_ } , 
depthBiasSlopeFactor{ depthBiasSlopeFactor_ } , lineWidth{ lineWidth_ } { } VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineRasterizationStateCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFlags( PipelineRasterizationStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setFlags( PipelineRasterizationStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthClampEnable( Bool32 depthClampEnable_ ) & VULKAN_HPP_NOEXCEPT { depthClampEnable = depthClampEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setDepthClampEnable( Bool32 depthClampEnable_ ) && VULKAN_HPP_NOEXCEPT { depthClampEnable = depthClampEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( 
Bool32 rasterizerDiscardEnable_ ) & VULKAN_HPP_NOEXCEPT { rasterizerDiscardEnable = rasterizerDiscardEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setRasterizerDiscardEnable( Bool32 rasterizerDiscardEnable_ ) && VULKAN_HPP_NOEXCEPT { rasterizerDiscardEnable = rasterizerDiscardEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setPolygonMode( PolygonMode polygonMode_ ) & VULKAN_HPP_NOEXCEPT { polygonMode = polygonMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setPolygonMode( PolygonMode polygonMode_ ) && VULKAN_HPP_NOEXCEPT { polygonMode = polygonMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setCullMode( CullModeFlags cullMode_ ) & VULKAN_HPP_NOEXCEPT { cullMode = cullMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setCullMode( CullModeFlags cullMode_ ) && VULKAN_HPP_NOEXCEPT { cullMode = cullMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setFrontFace( FrontFace frontFace_ ) & VULKAN_HPP_NOEXCEPT { frontFace = frontFace_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setFrontFace( FrontFace frontFace_ ) && VULKAN_HPP_NOEXCEPT { frontFace = frontFace_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasEnable( Bool32 depthBiasEnable_ ) & VULKAN_HPP_NOEXCEPT { depthBiasEnable = depthBiasEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setDepthBiasEnable( Bool32 depthBiasEnable_ ) && VULKAN_HPP_NOEXCEPT { depthBiasEnable = depthBiasEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) & VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) && VULKAN_HPP_NOEXCEPT { depthBiasConstantFactor = depthBiasConstantFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) & VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setDepthBiasClamp( float depthBiasClamp_ ) && VULKAN_HPP_NOEXCEPT { depthBiasClamp = depthBiasClamp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) & VULKAN_HPP_NOEXCEPT { depthBiasSlopeFactor = depthBiasSlopeFactor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) && VULKAN_HPP_NOEXCEPT { depthBiasSlopeFactor = depthBiasSlopeFactor_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) & VULKAN_HPP_NOEXCEPT { lineWidth = lineWidth_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo && setLineWidth( float lineWidth_ ) && VULKAN_HPP_NOEXCEPT { lineWidth = lineWidth_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineRasterizationStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineRasterizationStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineRasterizationStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, 
depthClampEnable, rasterizerDiscardEnable, polygonMode, cullMode, frontFace, depthBiasEnable, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor, lineWidth ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineRasterizationStateCreateInfo const & ) const = default; #else bool operator==( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthClampEnable == rhs.depthClampEnable ) && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable ) && ( polygonMode == rhs.polygonMode ) && ( cullMode == rhs.cullMode ) && ( frontFace == rhs.frontFace ) && ( depthBiasEnable == rhs.depthBiasEnable ) && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor ) && ( depthBiasClamp == rhs.depthBiasClamp ) && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor ) && ( lineWidth == rhs.lineWidth ); # endif } bool operator!=( PipelineRasterizationStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo; const void * pNext = {}; PipelineRasterizationStateCreateFlags flags = {}; Bool32 depthClampEnable = {}; Bool32 rasterizerDiscardEnable = {}; PolygonMode polygonMode = PolygonMode::eFill; CullModeFlags cullMode = {}; FrontFace frontFace = FrontFace::eCounterClockwise; Bool32 depthBiasEnable = {}; float depthBiasConstantFactor = {}; float depthBiasClamp = {}; float depthBiasSlopeFactor = {}; float lineWidth = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineRasterizationStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineRasterizationStateCreateInfo; }; // wrapper struct for struct VkPipelineMultisampleStateCreateInfo, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineMultisampleStateCreateInfo.html struct PipelineMultisampleStateCreateInfo { using NativeType = VkPipelineMultisampleStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateFlags flags_ = {}, SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const SampleMask * pSampleMask_ = {}, Bool32 alphaToCoverageEnable_ = {}, Bool32 alphaToOneEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , rasterizationSamples{ rasterizationSamples_ } , sampleShadingEnable{ sampleShadingEnable_ } , minSampleShading{ minSampleShading_ } , pSampleMask{ pSampleMask_ } , alphaToCoverageEnable{ alphaToCoverageEnable_ } , alphaToOneEnable{ alphaToOneEnable_ } { } VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineMultisampleStateCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setFlags( PipelineMultisampleStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setFlags( PipelineMultisampleStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) & VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) && VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setSampleShadingEnable( Bool32 sampleShadingEnable_ ) & VULKAN_HPP_NOEXCEPT { sampleShadingEnable = sampleShadingEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setSampleShadingEnable( Bool32 sampleShadingEnable_ ) && VULKAN_HPP_NOEXCEPT { sampleShadingEnable = sampleShadingEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) & VULKAN_HPP_NOEXCEPT { minSampleShading = minSampleShading_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setMinSampleShading( float minSampleShading_ ) && VULKAN_HPP_NOEXCEPT { minSampleShading = minSampleShading_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setPSampleMask( const SampleMask * pSampleMask_ ) & VULKAN_HPP_NOEXCEPT { pSampleMask = pSampleMask_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setPSampleMask( const SampleMask * pSampleMask_ ) && VULKAN_HPP_NOEXCEPT { pSampleMask = pSampleMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ ) & VULKAN_HPP_NOEXCEPT { alphaToCoverageEnable = alphaToCoverageEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setAlphaToCoverageEnable( Bool32 alphaToCoverageEnable_ ) && VULKAN_HPP_NOEXCEPT { alphaToCoverageEnable = alphaToCoverageEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( Bool32 alphaToOneEnable_ ) & VULKAN_HPP_NOEXCEPT { alphaToOneEnable = alphaToOneEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo && setAlphaToOneEnable( Bool32 alphaToOneEnable_ ) && VULKAN_HPP_NOEXCEPT { alphaToOneEnable = alphaToOneEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineMultisampleStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineMultisampleStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineMultisampleStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, rasterizationSamples, sampleShadingEnable, minSampleShading, pSampleMask, alphaToCoverageEnable, alphaToOneEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineMultisampleStateCreateInfo const & ) const = default; #else bool operator==( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( rasterizationSamples == rhs.rasterizationSamples ) && ( sampleShadingEnable == rhs.sampleShadingEnable ) && ( minSampleShading == rhs.minSampleShading ) && ( pSampleMask == rhs.pSampleMask ) && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) && ( alphaToOneEnable == rhs.alphaToOneEnable ); # endif } bool operator!=( PipelineMultisampleStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; const void * pNext = {}; PipelineMultisampleStateCreateFlags flags = {}; SampleCountFlagBits rasterizationSamples = SampleCountFlagBits::e1; Bool32 sampleShadingEnable = {}; float minSampleShading = {}; const SampleMask * pSampleMask = {}; Bool32 alphaToCoverageEnable = {}; Bool32 alphaToOneEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PipelineMultisampleStateCreateInfo; }; #endif template <> struct CppType { using Type = PipelineMultisampleStateCreateInfo; }; // wrapper struct for struct VkStencilOpState, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkStencilOpState.html struct StencilOpState { using NativeType = VkStencilOpState; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR StencilOpState( StencilOp failOp_ = StencilOp::eKeep, StencilOp passOp_ = StencilOp::eKeep, StencilOp depthFailOp_ = StencilOp::eKeep, CompareOp compareOp_ = CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT : failOp{ failOp_ } , passOp{ passOp_ } , depthFailOp{ depthFailOp_ } , compareOp{ compareOp_ } , compareMask{ compareMask_ } , writeMask{ writeMask_ } , reference{ reference_ } { } VULKAN_HPP_CONSTEXPR StencilOpState( 
StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT : StencilOpState( *reinterpret_cast( &rhs ) ) {} StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 StencilOpState & setFailOp( StencilOp failOp_ ) & VULKAN_HPP_NOEXCEPT { failOp = failOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setFailOp( StencilOp failOp_ ) && VULKAN_HPP_NOEXCEPT { failOp = failOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StencilOpState & setPassOp( StencilOp passOp_ ) & VULKAN_HPP_NOEXCEPT { passOp = passOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setPassOp( StencilOp passOp_ ) && VULKAN_HPP_NOEXCEPT { passOp = passOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StencilOpState & setDepthFailOp( StencilOp depthFailOp_ ) & VULKAN_HPP_NOEXCEPT { depthFailOp = depthFailOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setDepthFailOp( StencilOp depthFailOp_ ) && VULKAN_HPP_NOEXCEPT { depthFailOp = depthFailOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareOp( CompareOp compareOp_ ) & VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setCompareOp( CompareOp compareOp_ ) && VULKAN_HPP_NOEXCEPT { compareOp = compareOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StencilOpState & setCompareMask( uint32_t compareMask_ ) & VULKAN_HPP_NOEXCEPT { compareMask = compareMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setCompareMask( uint32_t compareMask_ ) && VULKAN_HPP_NOEXCEPT { compareMask = compareMask_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 StencilOpState & setWriteMask( uint32_t writeMask_ ) & VULKAN_HPP_NOEXCEPT { writeMask = writeMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setWriteMask( uint32_t writeMask_ ) && VULKAN_HPP_NOEXCEPT { writeMask = writeMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 StencilOpState & setReference( uint32_t reference_ ) & VULKAN_HPP_NOEXCEPT { reference = reference_; return *this; } VULKAN_HPP_CONSTEXPR_14 StencilOpState && setReference( uint32_t reference_ ) && VULKAN_HPP_NOEXCEPT { reference = reference_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkStencilOpState const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkStencilOpState const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkStencilOpState *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( failOp, passOp, depthFailOp, compareOp, compareMask, writeMask, reference ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( StencilOpState const & ) const = default; #else bool operator==( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( failOp == rhs.failOp ) && ( passOp == rhs.passOp ) && ( depthFailOp == rhs.depthFailOp ) && ( compareOp == rhs.compareOp ) && ( compareMask == rhs.compareMask ) && ( writeMask == rhs.writeMask ) && ( reference == rhs.reference ); # endif } bool operator!=( StencilOpState const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StencilOp failOp = StencilOp::eKeep; StencilOp passOp = StencilOp::eKeep; StencilOp depthFailOp = StencilOp::eKeep; CompareOp compareOp = CompareOp::eNever; uint32_t 
compareMask = {}; uint32_t writeMask = {}; uint32_t reference = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = StencilOpState; }; #endif // wrapper struct for struct VkPipelineDepthStencilStateCreateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDepthStencilStateCreateInfo.html struct PipelineDepthStencilStateCreateInfo { using NativeType = VkPipelineDepthStencilStateCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateFlags flags_ = {}, Bool32 depthTestEnable_ = {}, Bool32 depthWriteEnable_ = {}, CompareOp depthCompareOp_ = CompareOp::eNever, Bool32 depthBoundsTestEnable_ = {}, Bool32 stencilTestEnable_ = {}, StencilOpState front_ = {}, StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , depthTestEnable{ depthTestEnable_ } , depthWriteEnable{ depthWriteEnable_ } , depthCompareOp{ depthCompareOp_ } , depthBoundsTestEnable{ depthBoundsTestEnable_ } , stencilTestEnable{ stencilTestEnable_ } , front{ front_ } , back{ back_ } , minDepthBounds{ minDepthBounds_ } , maxDepthBounds{ maxDepthBounds_ } { } VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PipelineDepthStencilStateCreateInfo( *reinterpret_cast( &rhs ) ) { } PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFlags( PipelineDepthStencilStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setFlags( PipelineDepthStencilStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthTestEnable( Bool32 depthTestEnable_ ) & VULKAN_HPP_NOEXCEPT { depthTestEnable = depthTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setDepthTestEnable( Bool32 depthTestEnable_ ) && VULKAN_HPP_NOEXCEPT { depthTestEnable = depthTestEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( Bool32 depthWriteEnable_ ) & VULKAN_HPP_NOEXCEPT { depthWriteEnable = depthWriteEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setDepthWriteEnable( Bool32 depthWriteEnable_ ) && VULKAN_HPP_NOEXCEPT { depthWriteEnable = depthWriteEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthCompareOp( CompareOp depthCompareOp_ ) & VULKAN_HPP_NOEXCEPT { depthCompareOp = depthCompareOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setDepthCompareOp( CompareOp depthCompareOp_ 
) && VULKAN_HPP_NOEXCEPT { depthCompareOp = depthCompareOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ ) & VULKAN_HPP_NOEXCEPT { depthBoundsTestEnable = depthBoundsTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setDepthBoundsTestEnable( Bool32 depthBoundsTestEnable_ ) && VULKAN_HPP_NOEXCEPT { depthBoundsTestEnable = depthBoundsTestEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setStencilTestEnable( Bool32 stencilTestEnable_ ) & VULKAN_HPP_NOEXCEPT { stencilTestEnable = stencilTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setStencilTestEnable( Bool32 stencilTestEnable_ ) && VULKAN_HPP_NOEXCEPT { stencilTestEnable = stencilTestEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setFront( StencilOpState const & front_ ) & VULKAN_HPP_NOEXCEPT { front = front_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setFront( StencilOpState const & front_ ) && VULKAN_HPP_NOEXCEPT { front = front_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setBack( StencilOpState const & back_ ) & VULKAN_HPP_NOEXCEPT { back = back_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setBack( StencilOpState const & back_ ) && VULKAN_HPP_NOEXCEPT { back = back_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) & VULKAN_HPP_NOEXCEPT { minDepthBounds = minDepthBounds_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setMinDepthBounds( float minDepthBounds_ ) && VULKAN_HPP_NOEXCEPT { minDepthBounds = minDepthBounds_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) & VULKAN_HPP_NOEXCEPT { maxDepthBounds = maxDepthBounds_; return *this; } VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo && setMaxDepthBounds( float maxDepthBounds_ ) && VULKAN_HPP_NOEXCEPT { maxDepthBounds = maxDepthBounds_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPipelineDepthStencilStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPipelineDepthStencilStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPipelineDepthStencilStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, depthTestEnable, depthWriteEnable, depthCompareOp, depthBoundsTestEnable, stencilTestEnable, front, back, minDepthBounds, maxDepthBounds ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PipelineDepthStencilStateCreateInfo const & ) const = default; #else bool operator==( PipelineDepthStencilStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( depthTestEnable == rhs.depthTestEnable ) && ( depthWriteEnable == rhs.depthWriteEnable ) && ( depthCompareOp == rhs.depthCompareOp ) && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable ) && ( stencilTestEnable == rhs.stencilTestEnable ) && ( front == rhs.front ) && ( back == rhs.back ) && ( minDepthBounds == rhs.minDepthBounds ) && ( maxDepthBounds == rhs.maxDepthBounds ); # endif } bool operator!=( PipelineDepthStencilStateCreateInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    // Mirrors VkPipelineDepthStencilStateCreateInfo; sType identifies this struct in a pNext chain.
    StructureType                        sType                 = StructureType::ePipelineDepthStencilStateCreateInfo;
    const void *                         pNext                 = {};
    PipelineDepthStencilStateCreateFlags flags                 = {};
    Bool32                               depthTestEnable       = {};
    Bool32                               depthWriteEnable      = {};
    CompareOp                            depthCompareOp        = CompareOp::eNever;
    Bool32                               depthBoundsTestEnable = {};
    Bool32                               stencilTestEnable     = {};
    StencilOpState                       front                 = {};
    StencilOpState                       back                  = {};
    float                                minDepthBounds        = {};
    float                                maxDepthBounds        = {};
  };

  // NOTE(review): throughout this region the angle-bracketed template argument lists (on CppType,
  // reinterpret_cast, static_cast, std::tuple, std::array, ArrayProxyNoTemporaries, ...) appear to
  // have been stripped from this copy of the generated header — restore them from the upstream
  // vulkan.hpp. All non-comment tokens are left untouched here.
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };
#endif

  template <>
  struct CppType
  {
    using Type = PipelineDepthStencilStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineColorBlendAttachmentState, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendAttachmentState.html
  // Per-attachment color-blend state: blend factors/ops for color and alpha, plus the write mask.
  struct PipelineColorBlendAttachmentState
  {
    using NativeType = VkPipelineColorBlendAttachmentState;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field is defaulted so any prefix of arguments may be supplied.
    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( Bool32              blendEnable_         = {},
                                                            BlendFactor         srcColorBlendFactor_ = BlendFactor::eZero,
                                                            BlendFactor         dstColorBlendFactor_ = BlendFactor::eZero,
                                                            BlendOp             colorBlendOp_        = BlendOp::eAdd,
                                                            BlendFactor         srcAlphaBlendFactor_ = BlendFactor::eZero,
                                                            BlendFactor         dstAlphaBlendFactor_ = BlendFactor::eZero,
                                                            BlendOp             alphaBlendOp_        = BlendOp::eAdd,
                                                            ColorComponentFlags colorWriteMask_      = {} ) VULKAN_HPP_NOEXCEPT
      : blendEnable{ blendEnable_ }
      , srcColorBlendFactor{ srcColorBlendFactor_ }
      , dstColorBlendFactor{ dstColorBlendFactor_ }
      , colorBlendOp{ colorBlendOp_ }
      , srcAlphaBlendFactor{ srcAlphaBlendFactor_ }
      , dstAlphaBlendFactor{ dstAlphaBlendFactor_ }
      , alphaBlendOp{ alphaBlendOp_ }
      , colorWriteMask{ colorWriteMask_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct via reinterpret_cast (template argument stripped — see NOTE above).
    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendAttachmentState( *reinterpret_cast( &rhs ) )
    {
    }

    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this through.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setBlendEnable( Bool32 blendEnable_ ) & VULKAN_HPP_NOEXCEPT { blendEnable = blendEnable_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setBlendEnable( Bool32 blendEnable_ ) && VULKAN_HPP_NOEXCEPT { blendEnable = blendEnable_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setSrcColorBlendFactor( BlendFactor srcColorBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { srcColorBlendFactor = srcColorBlendFactor_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setDstColorBlendFactor( BlendFactor dstColorBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { dstColorBlendFactor = dstColorBlendFactor_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorBlendOp( BlendOp colorBlendOp_ ) & VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setColorBlendOp( BlendOp colorBlendOp_ ) && VULKAN_HPP_NOEXCEPT { colorBlendOp = colorBlendOp_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setSrcAlphaBlendFactor( BlendFactor srcAlphaBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { srcAlphaBlendFactor = srcAlphaBlendFactor_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) & VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setDstAlphaBlendFactor( BlendFactor dstAlphaBlendFactor_ ) && VULKAN_HPP_NOEXCEPT { dstAlphaBlendFactor = dstAlphaBlendFactor_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setAlphaBlendOp( BlendOp alphaBlendOp_ ) & VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setAlphaBlendOp( BlendOp alphaBlendOp_ ) && VULKAN_HPP_NOEXCEPT { alphaBlendOp = alphaBlendOp_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & setColorWriteMask( ColorComponentFlags colorWriteMask_ ) & VULKAN_HPP_NOEXCEPT { colorWriteMask = colorWriteMask_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState && setColorWriteMask( ColorComponentFlags colorWriteMask_ ) && VULKAN_HPP_NOEXCEPT { colorWriteMask = colorWriteMask_; return std::move( *this ); }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C struct (reference and pointer forms).
    operator VkPipelineColorBlendAttachmentState const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineColorBlendAttachmentState const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

    operator VkPipelineColorBlendAttachmentState *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members (tuple template arguments stripped — see NOTE above).
    std::tuple reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( blendEnable, srcColorBlendFactor, dstColorBlendFactor, colorBlendOp, srcAlphaBlendFactor, dstAlphaBlendFactor, alphaBlendOp, colorWriteMask );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendAttachmentState const & ) const = default;
#else
    // Memberwise equality fallback when <=> is unavailable.
    bool operator==( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( blendEnable == rhs.blendEnable ) && ( srcColorBlendFactor == rhs.srcColorBlendFactor ) &&
             ( dstColorBlendFactor == rhs.dstColorBlendFactor ) && ( colorBlendOp == rhs.colorBlendOp ) &&
             ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor ) && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor ) &&
             ( alphaBlendOp == rhs.alphaBlendOp ) && ( colorWriteMask == rhs.colorWriteMask );
# endif
    }

    bool operator!=( PipelineColorBlendAttachmentState const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    Bool32              blendEnable         = {};
    BlendFactor         srcColorBlendFactor = BlendFactor::eZero;
    BlendFactor         dstColorBlendFactor = BlendFactor::eZero;
    BlendOp             colorBlendOp        = BlendOp::eAdd;
    BlendFactor         srcAlphaBlendFactor = BlendFactor::eZero;
    BlendFactor         dstAlphaBlendFactor = BlendFactor::eZero;
    BlendOp             alphaBlendOp        = BlendOp::eAdd;
    ColorComponentFlags colorWriteMask      = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = PipelineColorBlendAttachmentState;
  };
#endif

  // wrapper struct for struct VkPipelineColorBlendStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineColorBlendStateCreateInfo.html
  struct
PipelineColorBlendStateCreateInfo
  {
    using NativeType = VkPipelineColorBlendStateCreateInfo;

    static const bool                           allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor. NOTE(review): std::array's template arguments were stripped from this
    // copy of the generated header (see earlier note); all non-comment tokens are untouched.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags        flags_           = {},
                                                               Bool32                                    logicOpEnable_   = {},
                                                               LogicOp                                   logicOp_         = LogicOp::eClear,
                                                               uint32_t                                  attachmentCount_ = {},
                                                               const PipelineColorBlendAttachmentState * pAttachments_    = {},
                                                               std::array const &                        blendConstants_  = {},
                                                               const void *                              pNext_           = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , logicOpEnable{ logicOpEnable_ }
      , logicOp{ logicOp_ }
      , attachmentCount{ attachmentCount_ }
      , pAttachments{ pAttachments_ }
      , blendConstants{ blendConstants_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct.
    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineColorBlendStateCreateInfo( *reinterpret_cast( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives attachmentCount/pAttachments from an array proxy.
    PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateFlags flags_,
                                       Bool32                             logicOpEnable_,
                                       LogicOp                            logicOp_,
                                       ArrayProxyNoTemporaries const &    attachments_,
                                       std::array const &                 blendConstants_ = {},
                                       const void *                       pNext_          = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , logicOpEnable( logicOpEnable_ )
      , logicOp( logicOp_ )
      , attachmentCount( static_cast( attachments_.size() ) )
      , pAttachments( attachments_.data() )
      , blendConstants( blendConstants_ )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this through.
    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setFlags( PipelineColorBlendStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setFlags( PipelineColorBlendStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOpEnable( Bool32 logicOpEnable_ ) & VULKAN_HPP_NOEXCEPT { logicOpEnable = logicOpEnable_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setLogicOpEnable( Bool32 logicOpEnable_ ) && VULKAN_HPP_NOEXCEPT { logicOpEnable = logicOpEnable_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setLogicOp( LogicOp logicOp_ ) & VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setLogicOp( LogicOp logicOp_ ) && VULKAN_HPP_NOEXCEPT { logicOp = logicOp_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) & VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setAttachmentCount( uint32_t attachmentCount_ ) && VULKAN_HPP_NOEXCEPT { attachmentCount = attachmentCount_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setPAttachments( const PipelineColorBlendAttachmentState * pAttachments_ ) & VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setPAttachments( const PipelineColorBlendAttachmentState * pAttachments_ ) && VULKAN_HPP_NOEXCEPT { pAttachments = pAttachments_; return std::move( *this ); }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both attachmentCount and pAttachments from an array proxy.
    PipelineColorBlendStateCreateInfo & setAttachments( ArrayProxyNoTemporaries const & attachments_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentCount = static_cast( attachments_.size() );
      pAttachments    = attachments_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & setBlendConstants( std::array blendConstants_ ) & VULKAN_HPP_NOEXCEPT { blendConstants = blendConstants_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo && setBlendConstants( std::array blendConstants_ ) && VULKAN_HPP_NOEXCEPT { blendConstants = blendConstants_; return std::move( *this ); }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C struct (reference and pointer forms).
    operator VkPipelineColorBlendStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineColorBlendStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

    operator VkPipelineColorBlendStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
    // NOTE(review): the stray "const &>" below is residue of the stripped tuple argument list.
    std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, logicOpEnable, logicOp, attachmentCount, pAttachments, blendConstants );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineColorBlendStateCreateInfo const & ) const = default;
#else
    // Memberwise equality fallback when <=> is unavailable.
    bool operator==( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( logicOpEnable == rhs.logicOpEnable ) &&
             ( logicOp == rhs.logicOp ) && ( attachmentCount == rhs.attachmentCount ) && ( pAttachments == rhs.pAttachments ) &&
             ( blendConstants == rhs.blendConstants );
# endif
    }

    bool operator!=( PipelineColorBlendStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                             sType           = StructureType::ePipelineColorBlendStateCreateInfo;
    const void *                              pNext           = {};
    PipelineColorBlendStateCreateFlags        flags           = {};
    Bool32                                    logicOpEnable   = {};
    LogicOp                                   logicOp         = LogicOp::eClear;
    uint32_t                                  attachmentCount = {};
    const PipelineColorBlendAttachmentState * pAttachments    = {};
    ArrayWrapper1D                            blendConstants  = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = PipelineColorBlendStateCreateInfo;
  };
#endif

  template <>
  struct CppType
  {
    using Type = PipelineColorBlendStateCreateInfo;
  };

  // wrapper struct for struct VkPipelineDynamicStateCreateInfo, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPipelineDynamicStateCreateInfo.html
  // Lists the pieces of pipeline state that are dynamic (set via command buffer, not baked in).
  struct PipelineDynamicStateCreateInfo
  {
    using NativeType = VkPipelineDynamicStateCreateInfo;

    static const bool                           allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field is defaulted.
    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_             = {},
                                                         uint32_t                        dynamicStateCount_ = {},
                                                         const DynamicState *            pDynamicStates_    = {},
                                                         const void *                    pNext_             = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , dynamicStateCount{ dynamicStateCount_ }
      , pDynamicStates{ pDynamicStates_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const &
rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct (reinterpret_cast template argument stripped in this copy).
    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDynamicStateCreateInfo( *reinterpret_cast( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives dynamicStateCount/pDynamicStates from an array proxy.
    PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateFlags flags_,
                                    ArrayProxyNoTemporaries const & dynamicStates_,
                                    const void *                    pNext_ = nullptr )
      : pNext( pNext_ ), flags( flags_ ), dynamicStateCount( static_cast( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this through.
    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setFlags( PipelineDynamicStateCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo && setFlags( PipelineDynamicStateCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) & VULKAN_HPP_NOEXCEPT { dynamicStateCount = dynamicStateCount_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo && setDynamicStateCount( uint32_t dynamicStateCount_ ) && VULKAN_HPP_NOEXCEPT { dynamicStateCount = dynamicStateCount_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & setPDynamicStates( const DynamicState * pDynamicStates_ ) & VULKAN_HPP_NOEXCEPT { pDynamicStates = pDynamicStates_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo && setPDynamicStates( const DynamicState * pDynamicStates_ ) && VULKAN_HPP_NOEXCEPT { pDynamicStates = pDynamicStates_; return std::move( *this ); }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both dynamicStateCount and pDynamicStates from an array proxy.
    PipelineDynamicStateCreateInfo & setDynamicStates( ArrayProxyNoTemporaries const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
    {
      dynamicStateCount = static_cast( dynamicStates_.size() );
      pDynamicStates    = dynamicStates_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C struct (reference and pointer forms).
    operator VkPipelineDynamicStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkPipelineDynamicStateCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

    operator VkPipelineDynamicStateCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members (tuple template arguments stripped in this copy).
    std::tuple reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, flags, dynamicStateCount, pDynamicStates );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PipelineDynamicStateCreateInfo const & ) const = default;
#else
    // Memberwise equality fallback when <=> is unavailable.
    bool operator==( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( dynamicStateCount == rhs.dynamicStateCount ) &&
             ( pDynamicStates == rhs.pDynamicStates );
# endif
    }

    bool operator!=( PipelineDynamicStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                   sType             = StructureType::ePipelineDynamicStateCreateInfo;
    const void *                    pNext             = {};
    PipelineDynamicStateCreateFlags flags             = {};
    uint32_t                        dynamicStateCount = {};
    const DynamicState *            pDynamicStates    = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = PipelineDynamicStateCreateInfo;
  };
#endif

  template <>
  struct CppType
  {
    using Type = PipelineDynamicStateCreateInfo;
  };

  // wrapper struct for struct VkGraphicsPipelineCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineCreateInfo.html
  // Aggregates all fixed-function and shader-stage state needed to create a graphics pipeline.
  struct GraphicsPipelineCreateInfo
  {
    using NativeType = VkGraphicsPipelineCreateInfo;

    static const bool                           allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor; every field is defaulted so any prefix of arguments may be supplied.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( PipelineCreateFlags                          flags_               = {},
                                                        uint32_t                                     stageCount_          = {},
                                                        const PipelineShaderStageCreateInfo *        pStages_             = {},
                                                        const PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
                                                        const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
                                                        const PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
                                                        const PipelineViewportStateCreateInfo *      pViewportState_      = {},
                                                        const PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
                                                        const PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
                                                        const PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
                                                        const PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
                                                        const PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
                                                        PipelineLayout                               layout_              = {},
                                                        RenderPass                                   renderPass_          = {},
                                                        uint32_t                                     subpass_             = {},
                                                        Pipeline                                     basePipelineHandle_  = {},
                                                        int32_t                                      basePipelineIndex_   = {},
                                                        const void *                                 pNext_               = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , flags{ flags_ }
      , stageCount{ stageCount_ }
      , pStages{ pStages_
}
      , pVertexInputState{ pVertexInputState_ }
      , pInputAssemblyState{ pInputAssemblyState_ }
      , pTessellationState{ pTessellationState_ }
      , pViewportState{ pViewportState_ }
      , pRasterizationState{ pRasterizationState_ }
      , pMultisampleState{ pMultisampleState_ }
      , pDepthStencilState{ pDepthStencilState_ }
      , pColorBlendState{ pColorBlendState_ }
      , pDynamicState{ pDynamicState_ }
      , layout{ layout_ }
      , renderPass{ renderPass_ }
      , subpass{ subpass_ }
      , basePipelineHandle{ basePipelineHandle_ }
      , basePipelineIndex{ basePipelineIndex_ }
    {
    }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;

    // Conversion from the C struct (reinterpret_cast template argument stripped in this copy).
    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : GraphicsPipelineCreateInfo( *reinterpret_cast( &rhs ) )
    {
    }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode constructor: derives stageCount/pStages from an array proxy of shader stages.
    GraphicsPipelineCreateInfo( PipelineCreateFlags                          flags_,
                                ArrayProxyNoTemporaries const &              stages_,
                                const PipelineVertexInputStateCreateInfo *   pVertexInputState_   = {},
                                const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ = {},
                                const PipelineTessellationStateCreateInfo *  pTessellationState_  = {},
                                const PipelineViewportStateCreateInfo *      pViewportState_      = {},
                                const PipelineRasterizationStateCreateInfo * pRasterizationState_ = {},
                                const PipelineMultisampleStateCreateInfo *   pMultisampleState_   = {},
                                const PipelineDepthStencilStateCreateInfo *  pDepthStencilState_  = {},
                                const PipelineColorBlendStateCreateInfo *    pColorBlendState_    = {},
                                const PipelineDynamicStateCreateInfo *       pDynamicState_       = {},
                                PipelineLayout                               layout_              = {},
                                RenderPass                                   renderPass_          = {},
                                uint32_t                                     subpass_             = {},
                                Pipeline                                     basePipelineHandle_  = {},
                                int32_t                                      basePipelineIndex_   = {},
                                const void *                                 pNext_               = nullptr )
      : pNext( pNext_ )
      , flags( flags_ )
      , stageCount( static_cast( stages_.size() ) )
      , pStages( stages_.data() )
      , pVertexInputState( pVertexInputState_ )
      , pInputAssemblyState( pInputAssemblyState_ )
      , pTessellationState( pTessellationState_ )
      , pViewportState( pViewportState_ )
      , pRasterizationState( pRasterizationState_ )
      , pMultisampleState( pMultisampleState_ )
      , pDepthStencilState( pDepthStencilState_ )
      , pColorBlendState( pColorBlendState_ )
      , pDynamicState( pDynamicState_ )
      , layout( layout_ )
      , renderPass( renderPass_ )
      , subpass( subpass_ )
      , basePipelineHandle( basePipelineHandle_ )
      , basePipelineIndex( basePipelineIndex_ )
    {
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this through.
    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setFlags( PipelineCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setFlags( PipelineCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) & VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setStageCount( uint32_t stageCount_ ) && VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) & VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPStages( const PipelineShaderStageCreateInfo * pStages_ ) && VULKAN_HPP_NOEXCEPT { pStages = pStages_; return std::move( *this ); }

# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
    // Enhanced-mode setter: sets both stageCount and pStages from an array proxy.
    GraphicsPipelineCreateInfo & setStages( ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT
    {
      stageCount = static_cast( stages_.size() );
      pStages    = stages_.data();
      return *this;
    }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) & VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) && VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) & VULKAN_HPP_NOEXCEPT { pInputAssemblyState = pInputAssemblyState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPInputAssemblyState( const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState_ ) && VULKAN_HPP_NOEXCEPT { pInputAssemblyState = pInputAssemblyState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) & VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) && VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPViewportState( const PipelineViewportStateCreateInfo * pViewportState_ ) & VULKAN_HPP_NOEXCEPT { pViewportState = pViewportState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPViewportState( const PipelineViewportStateCreateInfo * pViewportState_ ) && VULKAN_HPP_NOEXCEPT { pViewportState = pViewportState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPRasterizationState( const PipelineRasterizationStateCreateInfo * pRasterizationState_ ) & VULKAN_HPP_NOEXCEPT { pRasterizationState = pRasterizationState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPRasterizationState( const PipelineRasterizationStateCreateInfo * pRasterizationState_ ) && VULKAN_HPP_NOEXCEPT { pRasterizationState = pRasterizationState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPMultisampleState( const PipelineMultisampleStateCreateInfo * pMultisampleState_ ) & VULKAN_HPP_NOEXCEPT { pMultisampleState = pMultisampleState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPMultisampleState( const PipelineMultisampleStateCreateInfo * pMultisampleState_ ) && VULKAN_HPP_NOEXCEPT { pMultisampleState = pMultisampleState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDepthStencilState( const PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) & VULKAN_HPP_NOEXCEPT { pDepthStencilState = pDepthStencilState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPDepthStencilState( const PipelineDepthStencilStateCreateInfo * pDepthStencilState_ ) && VULKAN_HPP_NOEXCEPT { pDepthStencilState = pDepthStencilState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPColorBlendState( const PipelineColorBlendStateCreateInfo * pColorBlendState_ ) & VULKAN_HPP_NOEXCEPT { pColorBlendState = pColorBlendState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPColorBlendState( const PipelineColorBlendStateCreateInfo * pColorBlendState_ ) && VULKAN_HPP_NOEXCEPT { pColorBlendState = pColorBlendState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setPDynamicState( const PipelineDynamicStateCreateInfo * pDynamicState_ ) & VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setPDynamicState( const PipelineDynamicStateCreateInfo * pDynamicState_ ) && VULKAN_HPP_NOEXCEPT { pDynamicState = pDynamicState_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setLayout( PipelineLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setLayout( PipelineLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setRenderPass( RenderPass renderPass_ ) & VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setRenderPass( RenderPass renderPass_ ) && VULKAN_HPP_NOEXCEPT { renderPass = renderPass_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) & VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setSubpass( uint32_t subpass_ ) && VULKAN_HPP_NOEXCEPT { subpass = subpass_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineHandle( Pipeline basePipelineHandle_ ) & VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setBasePipelineHandle( Pipeline basePipelineHandle_ ) && VULKAN_HPP_NOEXCEPT { basePipelineHandle = basePipelineHandle_; return std::move( *this ); }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) & VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return *this; }

    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo && setBasePipelineIndex( int32_t basePipelineIndex_ ) && VULKAN_HPP_NOEXCEPT { basePipelineIndex = basePipelineIndex_; return std::move( *this ); }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the layout-compatible C struct (reference and pointer forms).
    operator VkGraphicsPipelineCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); }

    operator VkGraphicsPipelineCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

    operator VkGraphicsPipelineCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view of all members (tuple template arguments stripped in this copy).
    std::tuple reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType,
                       pNext,
                       flags,
                       stageCount,
                       pStages,
                       pVertexInputState,
                       pInputAssemblyState,
                       pTessellationState,
                       pViewportState,
                       pRasterizationState,
                       pMultisampleState,
                       pDepthStencilState,
                       pColorBlendState,
                       pDynamicState,
                       layout,
                       renderPass,
                       subpass,
                       basePipelineHandle,
                       basePipelineIndex );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( GraphicsPipelineCreateInfo const & ) const = default;
#else
    // Memberwise equality fallback when <=> is unavailable.
    bool operator==( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
# if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
# else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( stageCount == rhs.stageCount ) &&
             ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) && ( pInputAssemblyState == rhs.pInputAssemblyState ) &&
             ( pTessellationState == rhs.pTessellationState ) && ( pViewportState == rhs.pViewportState ) &&
             ( pRasterizationState == rhs.pRasterizationState ) && ( pMultisampleState == rhs.pMultisampleState ) &&
             ( pDepthStencilState == rhs.pDepthStencilState ) && ( pColorBlendState == rhs.pColorBlendState ) &&
             ( pDynamicState == rhs.pDynamicState ) && ( layout == rhs.layout ) && ( renderPass == rhs.renderPass ) &&
             ( subpass == rhs.subpass ) && ( basePipelineHandle == rhs.basePipelineHandle ) && ( basePipelineIndex == rhs.basePipelineIndex );
# endif
    }

    bool operator!=( GraphicsPipelineCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType                                sType               = StructureType::eGraphicsPipelineCreateInfo;
    const void *                                 pNext               = {};
    PipelineCreateFlags                          flags               = {};
    uint32_t                                     stageCount          = {};
    const PipelineShaderStageCreateInfo *        pStages             = {};
    const PipelineVertexInputStateCreateInfo *   pVertexInputState   = {};
    const PipelineInputAssemblyStateCreateInfo * pInputAssemblyState = {};
    const PipelineTessellationStateCreateInfo *  pTessellationState  = {};
    const PipelineViewportStateCreateInfo *      pViewportState      = {};
    const PipelineRasterizationStateCreateInfo * pRasterizationState = {};
    const PipelineMultisampleStateCreateInfo *   pMultisampleState   = {};
    const PipelineDepthStencilStateCreateInfo *  pDepthStencilState  = {};
    const PipelineColorBlendStateCreateInfo *    pColorBlendState    = {};
    const PipelineDynamicStateCreateInfo *       pDynamicState       = {};
    PipelineLayout                               layout              = {};
    RenderPass                                   renderPass          = {};
    uint32_t                                     subpass             = {};
    Pipeline                                     basePipelineHandle  = {};
    int32_t                                      basePipelineIndex   = {};
  };

#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = GraphicsPipelineCreateInfo;
  };
#endif

  template <>
  struct CppType
  {
    using Type = GraphicsPipelineCreateInfo;
  };

  // wrapper struct for struct VkGraphicsPipelineLibraryCreateInfoEXT, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineLibraryCreateInfoEXT.html
  struct GraphicsPipelineLibraryCreateInfoEXT
  {
    using NativeType = VkGraphicsPipelineLibraryCreateInfoEXT;

    static const bool                           allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR
GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineLibraryCreateInfoEXT( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : GraphicsPipelineLibraryCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } GraphicsPipelineLibraryCreateInfoEXT & operator=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GraphicsPipelineLibraryCreateInfoEXT & operator=( VkGraphicsPipelineLibraryCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT & setFlags( GraphicsPipelineLibraryFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineLibraryCreateInfoEXT && setFlags( GraphicsPipelineLibraryFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGraphicsPipelineLibraryCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsPipelineLibraryCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsPipelineLibraryCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkGraphicsPipelineLibraryCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GraphicsPipelineLibraryCreateInfoEXT const & ) const = default; #else bool operator==( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( GraphicsPipelineLibraryCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGraphicsPipelineLibraryCreateInfoEXT; const void * pNext = {}; GraphicsPipelineLibraryFlagsEXT flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GraphicsPipelineLibraryCreateInfoEXT; }; #endif template <> struct CppType { using Type = GraphicsPipelineLibraryCreateInfoEXT; }; // wrapper struct for struct VkGraphicsShaderGroupCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsShaderGroupCreateInfoNV.html struct GraphicsShaderGroupCreateInfoNV { using NativeType = VkGraphicsShaderGroupCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( uint32_t stageCount_ = {}, const PipelineShaderStageCreateInfo * pStages_ = {}, const PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , 
stageCount{ stageCount_ } , pStages{ pStages_ } , pVertexInputState{ pVertexInputState_ } , pTessellationState{ pTessellationState_ } { } VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GraphicsShaderGroupCreateInfoNV( ArrayProxyNoTemporaries const & stages_, const PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , stageCount( static_cast( stages_.size() ) ) , pStages( stages_.data() ) , pVertexInputState( pVertexInputState_ ) , pTessellationState( pTessellationState_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) & VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV && setStageCount( uint32_t stageCount_ ) && VULKAN_HPP_NOEXCEPT { stageCount = stageCount_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPStages( const PipelineShaderStageCreateInfo * pStages_ ) & VULKAN_HPP_NOEXCEPT { pStages = pStages_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV && setPStages( const PipelineShaderStageCreateInfo * pStages_ ) && VULKAN_HPP_NOEXCEPT { pStages = pStages_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GraphicsShaderGroupCreateInfoNV & setStages( ArrayProxyNoTemporaries const & stages_ ) VULKAN_HPP_NOEXCEPT { stageCount = static_cast( stages_.size() ); pStages = stages_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) & VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV && setPVertexInputState( const PipelineVertexInputStateCreateInfo * pVertexInputState_ ) && VULKAN_HPP_NOEXCEPT { pVertexInputState = pVertexInputState_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) & VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV && setPTessellationState( const PipelineTessellationStateCreateInfo * pTessellationState_ ) && VULKAN_HPP_NOEXCEPT { pTessellationState = pTessellationState_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGraphicsShaderGroupCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsShaderGroupCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } 
operator VkGraphicsShaderGroupCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stageCount, pStages, pVertexInputState, pTessellationState ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GraphicsShaderGroupCreateInfoNV const & ) const = default; #else bool operator==( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stageCount == rhs.stageCount ) && ( pStages == rhs.pStages ) && ( pVertexInputState == rhs.pVertexInputState ) && ( pTessellationState == rhs.pTessellationState ); # endif } bool operator!=( GraphicsShaderGroupCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV; const void * pNext = {}; uint32_t stageCount = {}; const PipelineShaderStageCreateInfo * pStages = {}; const PipelineVertexInputStateCreateInfo * pVertexInputState = {}; const PipelineTessellationStateCreateInfo * pTessellationState = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GraphicsShaderGroupCreateInfoNV; }; #endif template <> struct CppType { using Type = GraphicsShaderGroupCreateInfoNV; }; // wrapper struct for struct VkGraphicsPipelineShaderGroupsCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkGraphicsPipelineShaderGroupsCreateInfoNV.html struct GraphicsPipelineShaderGroupsCreateInfoNV { using NativeType = VkGraphicsPipelineShaderGroupsCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( uint32_t groupCount_ = {}, const GraphicsShaderGroupCreateInfoNV * pGroups_ = {}, uint32_t pipelineCount_ = {}, const Pipeline * pPipelines_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , groupCount{ groupCount_ } , pGroups{ pGroups_ } , pipelineCount{ pipelineCount_ } , pPipelines{ pPipelines_ } { } VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GraphicsPipelineShaderGroupsCreateInfoNV( ArrayProxyNoTemporaries const & groups_, ArrayProxyNoTemporaries const & pipelines_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , groupCount( static_cast( groups_.size() ) ) , pGroups( groups_.data() ) , pipelineCount( static_cast( pipelines_.size() ) ) , pPipelines( pipelines_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) & VULKAN_HPP_NOEXCEPT { groupCount = groupCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV && setGroupCount( uint32_t groupCount_ ) && VULKAN_HPP_NOEXCEPT { groupCount = groupCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const GraphicsShaderGroupCreateInfoNV * pGroups_ ) & VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV && setPGroups( const GraphicsShaderGroupCreateInfoNV * pGroups_ ) && VULKAN_HPP_NOEXCEPT { pGroups = pGroups_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( ArrayProxyNoTemporaries const & groups_ ) VULKAN_HPP_NOEXCEPT { groupCount = static_cast( groups_.size() ); pGroups = groups_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) & VULKAN_HPP_NOEXCEPT { pipelineCount = pipelineCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV && setPipelineCount( uint32_t pipelineCount_ ) && VULKAN_HPP_NOEXCEPT { pipelineCount = pipelineCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const Pipeline * pPipelines_ ) & VULKAN_HPP_NOEXCEPT { pPipelines = pPipelines_; return *this; } VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV && setPPipelines( const Pipeline * pPipelines_ ) && VULKAN_HPP_NOEXCEPT { pPipelines = pPipelines_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( ArrayProxyNoTemporaries const & pipelines_ ) VULKAN_HPP_NOEXCEPT { pipelineCount = 
static_cast( pipelines_.size() ); pPipelines = pipelines_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkGraphicsPipelineShaderGroupsCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkGraphicsPipelineShaderGroupsCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkGraphicsPipelineShaderGroupsCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, groupCount, pGroups, pipelineCount, pPipelines ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const & ) const = default; #else bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( groupCount == rhs.groupCount ) && ( pGroups == rhs.pGroups ) && ( pipelineCount == rhs.pipelineCount ) && ( pPipelines == rhs.pPipelines ); # endif } bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV; const void * pNext = {}; uint32_t groupCount = {}; const GraphicsShaderGroupCreateInfoNV * pGroups = {}; uint32_t pipelineCount = {}; const Pipeline * pPipelines = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = GraphicsPipelineShaderGroupsCreateInfoNV; }; #endif template <> struct CppType { using Type = GraphicsPipelineShaderGroupsCreateInfoNV; }; // wrapper struct for struct 
VkXYColorEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkXYColorEXT.html struct XYColorEXT { using NativeType = VkXYColorEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT : x{ x_ } , y{ y_ } { } VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT : XYColorEXT( *reinterpret_cast( &rhs ) ) {} XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setX( float x_ ) & VULKAN_HPP_NOEXCEPT { x = x_; return *this; } VULKAN_HPP_CONSTEXPR_14 XYColorEXT && setX( float x_ ) && VULKAN_HPP_NOEXCEPT { x = x_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 XYColorEXT & setY( float y_ ) & VULKAN_HPP_NOEXCEPT { y = y_; return *this; } VULKAN_HPP_CONSTEXPR_14 XYColorEXT && setY( float y_ ) && VULKAN_HPP_NOEXCEPT { y = y_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkXYColorEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkXYColorEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkXYColorEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( x, y ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( XYColorEXT const & ) const = default; #else bool operator==( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( x == rhs.x ) && ( y == rhs.y ); # endif } bool operator!=( XYColorEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: float x = {}; float y = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = XYColorEXT; }; #endif // wrapper struct for struct VkHdrMetadataEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrMetadataEXT.html struct HdrMetadataEXT { using NativeType = VkHdrMetadataEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HdrMetadataEXT( XYColorEXT displayPrimaryRed_ = {}, XYColorEXT displayPrimaryGreen_ = {}, XYColorEXT displayPrimaryBlue_ = {}, XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displayPrimaryRed{ displayPrimaryRed_ } , displayPrimaryGreen{ displayPrimaryGreen_ } , displayPrimaryBlue{ displayPrimaryBlue_ } , whitePoint{ whitePoint_ } , maxLuminance{ maxLuminance_ } , minLuminance{ minLuminance_ } , maxContentLightLevel{ maxContentLightLevel_ } , maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ } { } VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HdrMetadataEXT( *reinterpret_cast( &rhs ) ) {} HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryRed( XYColorEXT const & displayPrimaryRed_ ) & VULKAN_HPP_NOEXCEPT { displayPrimaryRed = displayPrimaryRed_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setDisplayPrimaryRed( XYColorEXT const & displayPrimaryRed_ ) && VULKAN_HPP_NOEXCEPT { displayPrimaryRed = displayPrimaryRed_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryGreen( XYColorEXT const & displayPrimaryGreen_ ) & VULKAN_HPP_NOEXCEPT { displayPrimaryGreen = displayPrimaryGreen_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setDisplayPrimaryGreen( XYColorEXT const & displayPrimaryGreen_ ) && VULKAN_HPP_NOEXCEPT { displayPrimaryGreen = displayPrimaryGreen_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setDisplayPrimaryBlue( XYColorEXT const & displayPrimaryBlue_ ) & VULKAN_HPP_NOEXCEPT { displayPrimaryBlue = displayPrimaryBlue_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setDisplayPrimaryBlue( XYColorEXT const & displayPrimaryBlue_ ) && VULKAN_HPP_NOEXCEPT { displayPrimaryBlue = displayPrimaryBlue_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setWhitePoint( XYColorEXT const & whitePoint_ ) & VULKAN_HPP_NOEXCEPT { whitePoint = whitePoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setWhitePoint( XYColorEXT const & whitePoint_ ) && VULKAN_HPP_NOEXCEPT { whitePoint = whitePoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) & VULKAN_HPP_NOEXCEPT { maxLuminance = maxLuminance_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setMaxLuminance( float maxLuminance_ ) && VULKAN_HPP_NOEXCEPT { maxLuminance = maxLuminance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMinLuminance( float minLuminance_ ) & VULKAN_HPP_NOEXCEPT { minLuminance = minLuminance_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setMinLuminance( float minLuminance_ ) && VULKAN_HPP_NOEXCEPT { minLuminance = minLuminance_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) & VULKAN_HPP_NOEXCEPT { maxContentLightLevel = maxContentLightLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setMaxContentLightLevel( float maxContentLightLevel_ ) && VULKAN_HPP_NOEXCEPT { maxContentLightLevel = maxContentLightLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) & VULKAN_HPP_NOEXCEPT { maxFrameAverageLightLevel = maxFrameAverageLightLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT && setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) && VULKAN_HPP_NOEXCEPT { maxFrameAverageLightLevel = maxFrameAverageLightLevel_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkHdrMetadataEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHdrMetadataEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHdrMetadataEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displayPrimaryRed, displayPrimaryGreen, displayPrimaryBlue, whitePoint, maxLuminance, minLuminance, maxContentLightLevel, maxFrameAverageLightLevel ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HdrMetadataEXT const & ) const = default; #else bool operator==( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displayPrimaryRed == rhs.displayPrimaryRed ) && ( displayPrimaryGreen == rhs.displayPrimaryGreen ) && ( displayPrimaryBlue == rhs.displayPrimaryBlue ) && ( whitePoint == rhs.whitePoint ) && ( maxLuminance == rhs.maxLuminance ) && ( minLuminance == rhs.minLuminance ) && ( maxContentLightLevel == rhs.maxContentLightLevel ) && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel ); # endif } bool operator!=( HdrMetadataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eHdrMetadataEXT; const void * pNext = {}; XYColorEXT displayPrimaryRed = {}; XYColorEXT displayPrimaryGreen = {}; XYColorEXT displayPrimaryBlue = {}; XYColorEXT whitePoint = {}; float maxLuminance = {}; float minLuminance = {}; float maxContentLightLevel = {}; float maxFrameAverageLightLevel = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HdrMetadataEXT; }; #endif template <> struct CppType { using Type = HdrMetadataEXT; }; // wrapper struct for struct VkHdrVividDynamicMetadataHUAWEI, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHdrVividDynamicMetadataHUAWEI.html struct HdrVividDynamicMetadataHUAWEI { using NativeType = VkHdrVividDynamicMetadataHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicMetadataSize{ dynamicMetadataSize_ } , pDynamicMetadata{ pDynamicMetadata_ } { } VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : HdrVividDynamicMetadataHUAWEI( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HdrVividDynamicMetadataHUAWEI( ArrayProxyNoTemporaries const & dynamicMetadata_, const void * pNext_ = nullptr ) : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) & VULKAN_HPP_NOEXCEPT { dynamicMetadataSize = dynamicMetadataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI && setDynamicMetadataSize( size_t dynamicMetadataSize_ ) && VULKAN_HPP_NOEXCEPT { dynamicMetadataSize = dynamicMetadataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) & 
VULKAN_HPP_NOEXCEPT { pDynamicMetadata = pDynamicMetadata_; return *this; } VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI && setPDynamicMetadata( const void * pDynamicMetadata_ ) && VULKAN_HPP_NOEXCEPT { pDynamicMetadata = pDynamicMetadata_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( ArrayProxyNoTemporaries const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT { dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T ); pDynamicMetadata = dynamicMetadata_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHdrVividDynamicMetadataHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHdrVividDynamicMetadataHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default; #else bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) && ( pDynamicMetadata == rhs.pDynamicMetadata ); # endif } bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI; const void * pNext = {}; size_t dynamicMetadataSize = {}; 
const void * pDynamicMetadata = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HdrVividDynamicMetadataHUAWEI; }; #endif template <> struct CppType { using Type = HdrVividDynamicMetadataHUAWEI; }; // wrapper struct for struct VkHeadlessSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHeadlessSurfaceCreateInfoEXT.html struct HeadlessSurfaceCreateInfoEXT { using NativeType = VkHeadlessSurfaceCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & setFlags( HeadlessSurfaceCreateFlagsEXT flags_ ) & 
VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT && setFlags( HeadlessSurfaceCreateFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkHeadlessSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHeadlessSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHeadlessSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HeadlessSurfaceCreateInfoEXT const & ) const = default; #else bool operator==( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ); # endif } bool operator!=( HeadlessSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT; const void * pNext = {}; HeadlessSurfaceCreateFlagsEXT flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HeadlessSurfaceCreateInfoEXT; }; #endif template <> struct CppType { using Type = HeadlessSurfaceCreateInfoEXT; }; // wrapper struct for struct VkHostAddressRangeConstEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostAddressRangeConstEXT.html struct HostAddressRangeConstEXT { using NativeType = VkHostAddressRangeConstEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HostAddressRangeConstEXT( const void * address_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT : address{ address_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR HostAddressRangeConstEXT( HostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; HostAddressRangeConstEXT( VkHostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HostAddressRangeConstEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HostAddressRangeConstEXT( ArrayProxyNoTemporaries const & address_ ) : address( address_.data() ), size( address_.size() * sizeof( T ) ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ HostAddressRangeConstEXT & operator=( HostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HostAddressRangeConstEXT & operator=( VkHostAddressRangeConstEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT & setAddress( const void * address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT && setAddress( const void * address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HostAddressRangeConstEXT & setAddress( ArrayProxyNoTemporaries const & address_ ) VULKAN_HPP_NOEXCEPT { size = address_.size() * sizeof( T ); address = address_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT & setSize( size_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostAddressRangeConstEXT && setSize( size_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkHostAddressRangeConstEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostAddressRangeConstEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostAddressRangeConstEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHostAddressRangeConstEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( address, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HostAddressRangeConstEXT const & ) const = default; #else bool operator==( HostAddressRangeConstEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( address == rhs.address ) && ( size == rhs.size ); # endif } bool operator!=( HostAddressRangeConstEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: const void * address = {}; size_t size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HostAddressRangeConstEXT; }; #endif // wrapper struct for struct VkHostAddressRangeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostAddressRangeEXT.html struct HostAddressRangeEXT { using NativeType = VkHostAddressRangeEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HostAddressRangeEXT( void * address_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT : address{ address_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR HostAddressRangeEXT( HostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; HostAddressRangeEXT( VkHostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : HostAddressRangeEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HostAddressRangeEXT( ArrayProxyNoTemporaries const & address_ ) 
: address( address_.data() ), size( address_.size() * sizeof( T ) ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ HostAddressRangeEXT & operator=( HostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HostAddressRangeEXT & operator=( VkHostAddressRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT & setAddress( void * address_ ) & VULKAN_HPP_NOEXCEPT { address = address_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT && setAddress( void * address_ ) && VULKAN_HPP_NOEXCEPT { address = address_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template HostAddressRangeEXT & setAddress( ArrayProxyNoTemporaries const & address_ ) VULKAN_HPP_NOEXCEPT { size = address_.size() * sizeof( T ); address = address_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT & setSize( size_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostAddressRangeEXT && setSize( size_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkHostAddressRangeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostAddressRangeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostAddressRangeEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHostAddressRangeEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( address, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HostAddressRangeEXT const & ) const = default; #else bool 
operator==( HostAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( address == rhs.address ) && ( size == rhs.size ); # endif } bool operator!=( HostAddressRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: void * address = {}; size_t size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HostAddressRangeEXT; }; #endif // wrapper struct for struct VkHostImageCopyDevicePerformanceQuery, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageCopyDevicePerformanceQuery.html struct HostImageCopyDevicePerformanceQuery { using NativeType = VkHostImageCopyDevicePerformanceQuery; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQuery; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( Bool32 optimalDeviceAccess_ = {}, Bool32 identicalMemoryLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , optimalDeviceAccess{ optimalDeviceAccess_ } , identicalMemoryLayout{ identicalMemoryLayout_ } { } VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT : HostImageCopyDevicePerformanceQuery( *reinterpret_cast( &rhs ) ) { } HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator 
VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostImageCopyDevicePerformanceQuery const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHostImageCopyDevicePerformanceQuery *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, optimalDeviceAccess, identicalMemoryLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default; #else bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( optimalDeviceAccess == rhs.optimalDeviceAccess ) && ( identicalMemoryLayout == rhs.identicalMemoryLayout ); # endif } bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eHostImageCopyDevicePerformanceQuery; void * pNext = {}; Bool32 optimalDeviceAccess = {}; Bool32 identicalMemoryLayout = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HostImageCopyDevicePerformanceQuery; }; #endif template <> struct CppType { using Type = HostImageCopyDevicePerformanceQuery; }; using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; // wrapper struct for struct VkHostImageLayoutTransitionInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkHostImageLayoutTransitionInfo.html struct HostImageLayoutTransitionInfo { using NativeType = VkHostImageLayoutTransitionInfo; static const bool allowDuplicate 
= false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( Image image_ = {}, ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , oldLayout{ oldLayout_ } , newLayout{ newLayout_ } , subresourceRange{ subresourceRange_ } { } VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT : HostImageLayoutTransitionInfo( *reinterpret_cast( &rhs ) ) { } HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( ImageLayout oldLayout_ ) & 
VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo && setOldLayout( ImageLayout oldLayout_ ) && VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( ImageLayout newLayout_ ) & VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo && setNewLayout( ImageLayout newLayout_ ) && VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkHostImageLayoutTransitionInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkHostImageLayoutTransitionInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, oldLayout, newLayout, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default; #else bool operator==( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) 
&& ( image == rhs.image ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( subresourceRange == rhs.subresourceRange ); # endif } bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eHostImageLayoutTransitionInfo; const void * pNext = {}; Image image = {}; ImageLayout oldLayout = ImageLayout::eUndefined; ImageLayout newLayout = ImageLayout::eUndefined; ImageSubresourceRange subresourceRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = HostImageLayoutTransitionInfo; }; #endif template <> struct CppType { using Type = HostImageLayoutTransitionInfo; }; using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; #if defined( VK_USE_PLATFORM_IOS_MVK ) // wrapper struct for struct VkIOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkIOSSurfaceCreateInfoMVK.html struct IOSSurfaceCreateInfoMVK { using NativeType = VkIOSSurfaceCreateInfoMVK; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pView{ pView_ } { } VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT : IOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) { } IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IOSSurfaceCreateInfoMVK & operator=( 
VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setFlags( IOSSurfaceCreateFlagsMVK flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK && setFlags( IOSSurfaceCreateFlagsMVK flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) & VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK && setPView( const void * pView_ ) && VULKAN_HPP_NOEXCEPT { pView = pView_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pView ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IOSSurfaceCreateInfoMVK const & ) const = default; # else bool operator==( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); # endif } bool operator!=( IOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eIosSurfaceCreateInfoMVK; const void * pNext = {}; IOSSurfaceCreateFlagsMVK flags = {}; const void * pView = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IOSSurfaceCreateInfoMVK; }; # endif template <> struct CppType { using Type = IOSSurfaceCreateInfoMVK; }; #endif /*VK_USE_PLATFORM_IOS_MVK*/ // wrapper struct for struct VkImageAlignmentControlCreateInfoMESA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageAlignmentControlCreateInfoMESA.html struct ImageAlignmentControlCreateInfoMESA { using NativeType = VkImageAlignmentControlCreateInfoMESA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageAlignmentControlCreateInfoMESA; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maximumRequestedAlignment{ maximumRequestedAlignment_ } { } VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT : ImageAlignmentControlCreateInfoMESA( *reinterpret_cast( &rhs ) ) { } ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) & VULKAN_HPP_NOEXCEPT { maximumRequestedAlignment = maximumRequestedAlignment_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA && setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) && VULKAN_HPP_NOEXCEPT { maximumRequestedAlignment = maximumRequestedAlignment_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageAlignmentControlCreateInfoMESA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageAlignmentControlCreateInfoMESA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maximumRequestedAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default; #else bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maximumRequestedAlignment == rhs.maximumRequestedAlignment ); # 
endif } bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageAlignmentControlCreateInfoMESA; const void * pNext = {}; uint32_t maximumRequestedAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageAlignmentControlCreateInfoMESA; }; #endif template <> struct CppType { using Type = ImageAlignmentControlCreateInfoMESA; }; // wrapper struct for struct VkImageBlit, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageBlit.html struct ImageBlit { using NativeType = VkImageBlit; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageSubresourceLayers srcSubresource_ = {}, std::array const & srcOffsets_ = {}, ImageSubresourceLayers dstSubresource_ = {}, std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource{ srcSubresource_ } , srcOffsets{ srcOffsets_ } , dstSubresource{ dstSubresource_ } , dstOffsets{ dstOffsets_ } { } VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT : ImageBlit( *reinterpret_cast( &rhs ) ) {} ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 ImageBlit & setSrcOffsets( std::array const & srcOffsets_ ) & VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit && setSrcOffsets( std::array const & srcOffsets_ ) && VULKAN_HPP_NOEXCEPT { srcOffsets = srcOffsets_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageBlit & setDstOffsets( std::array const & dstOffsets_ ) & VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageBlit && setDstOffsets( std::array const & dstOffsets_ ) && VULKAN_HPP_NOEXCEPT { dstOffsets = dstOffsets_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageBlit const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageBlit &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageBlit const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageBlit *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ImageSubresourceLayers const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcSubresource, srcOffsets, dstSubresource, dstOffsets ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageBlit const & ) const = default; #else bool operator==( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcSubresource == rhs.srcSubresource ) && ( srcOffsets == rhs.srcOffsets ) && ( dstSubresource == 
rhs.dstSubresource ) && ( dstOffsets == rhs.dstOffsets ); # endif } bool operator!=( ImageBlit const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageSubresourceLayers srcSubresource = {}; ArrayWrapper1D srcOffsets = {}; ImageSubresourceLayers dstSubresource = {}; ArrayWrapper1D dstOffsets = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageBlit; }; #endif // wrapper struct for struct VkImageCaptureDescriptorDataInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCaptureDescriptorDataInfoEXT.html struct ImageCaptureDescriptorDataInfoEXT { using NativeType = VkImageCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCaptureDescriptorDataInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } { } VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCaptureDescriptorDataInfoEXT( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } ImageCaptureDescriptorDataInfoEXT & operator=( ImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCaptureDescriptorDataInfoEXT & operator=( VkImageCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCaptureDescriptorDataInfoEXT && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCaptureDescriptorDataInfoEXT const & ) const = default; #else bool operator==( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } bool operator!=( ImageCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageCaptureDescriptorDataInfoEXT; const void * pNext = {}; Image image = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCaptureDescriptorDataInfoEXT; }; #endif template <> struct CppType { using Type = ImageCaptureDescriptorDataInfoEXT; }; // wrapper struct for struct 
VkImageCompressionControlEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionControlEXT.html struct ImageCompressionControlEXT { using NativeType = VkImageCompressionControlEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionControlEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionFlagsEXT flags_ = {}, uint32_t compressionControlPlaneCount_ = {}, ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , compressionControlPlaneCount{ compressionControlPlaneCount_ } , pFixedRateFlags{ pFixedRateFlags_ } { } VULKAN_HPP_CONSTEXPR ImageCompressionControlEXT( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCompressionControlEXT( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCompressionControlEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageCompressionControlEXT( ImageCompressionFlagsEXT flags_, ArrayProxyNoTemporaries const & fixedRateFlags_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , compressionControlPlaneCount( static_cast( fixedRateFlags_.size() ) ) , pFixedRateFlags( fixedRateFlags_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageCompressionControlEXT & operator=( ImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCompressionControlEXT & operator=( VkImageCompressionControlEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPNext( const void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setFlags( ImageCompressionFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT && setFlags( ImageCompressionFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) & VULKAN_HPP_NOEXCEPT { compressionControlPlaneCount = compressionControlPlaneCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT && setCompressionControlPlaneCount( uint32_t compressionControlPlaneCount_ ) && VULKAN_HPP_NOEXCEPT { compressionControlPlaneCount = compressionControlPlaneCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT & setPFixedRateFlags( ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) & VULKAN_HPP_NOEXCEPT { pFixedRateFlags = pFixedRateFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCompressionControlEXT && setPFixedRateFlags( ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ ) && VULKAN_HPP_NOEXCEPT { pFixedRateFlags = pFixedRateFlags_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageCompressionControlEXT & setFixedRateFlags( ArrayProxyNoTemporaries const & fixedRateFlags_ ) VULKAN_HPP_NOEXCEPT { compressionControlPlaneCount = static_cast( fixedRateFlags_.size() ); pFixedRateFlags = fixedRateFlags_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageCompressionControlEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCompressionControlEXT &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkImageCompressionControlEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCompressionControlEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, compressionControlPlaneCount, pFixedRateFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCompressionControlEXT const & ) const = default; #else bool operator==( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( compressionControlPlaneCount == rhs.compressionControlPlaneCount ) && ( pFixedRateFlags == rhs.pFixedRateFlags ); # endif } bool operator!=( ImageCompressionControlEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageCompressionControlEXT; const void * pNext = {}; ImageCompressionFlagsEXT flags = {}; uint32_t compressionControlPlaneCount = {}; ImageCompressionFixedRateFlagsEXT * pFixedRateFlags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCompressionControlEXT; }; #endif template <> struct CppType { using Type = ImageCompressionControlEXT; }; // wrapper struct for struct VkImageCompressionPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCompressionPropertiesEXT.html struct ImageCompressionPropertiesEXT { using NativeType = VkImageCompressionPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCompressionPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionFlagsEXT imageCompressionFlags_ = {}, ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageCompressionFlags{ imageCompressionFlags_ } , imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ } { } VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCompressionPropertiesEXT( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) { } ImageCompressionPropertiesEXT & operator=( ImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCompressionPropertiesEXT & operator=( VkImageCompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkImageCompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageCompressionFlags, imageCompressionFixedRateFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCompressionPropertiesEXT const & ) const = default; #else bool operator==( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionFlags == 
rhs.imageCompressionFlags ) && ( imageCompressionFixedRateFlags == rhs.imageCompressionFixedRateFlags ); # endif } bool operator!=( ImageCompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageCompressionPropertiesEXT; void * pNext = {}; ImageCompressionFlagsEXT imageCompressionFlags = {}; ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCompressionPropertiesEXT; }; #endif template <> struct CppType { using Type = ImageCompressionPropertiesEXT; }; #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImageFormatConstraintsInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatConstraintsInfoFUCHSIA.html struct ImageFormatConstraintsInfoFUCHSIA { using NativeType = VkImageFormatConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatConstraintsInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageCreateInfo imageCreateInfo_ = {}, FormatFeatureFlags requiredFormatFeatures_ = {}, ImageFormatConstraintsFlagsFUCHSIA flags_ = {}, uint64_t sysmemPixelFormat_ = {}, uint32_t colorSpaceCount_ = {}, const SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageCreateInfo{ imageCreateInfo_ } , requiredFormatFeatures{ requiredFormatFeatures_ } , flags{ flags_ } , sysmemPixelFormat{ sysmemPixelFormat_ } , colorSpaceCount{ colorSpaceCount_ } , pColorSpaces{ pColorSpaces_ } { } VULKAN_HPP_CONSTEXPR ImageFormatConstraintsInfoFUCHSIA( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageFormatConstraintsInfoFUCHSIA( 
VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImageFormatConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageFormatConstraintsInfoFUCHSIA( ImageCreateInfo imageCreateInfo_, FormatFeatureFlags requiredFormatFeatures_, ImageFormatConstraintsFlagsFUCHSIA flags_, uint64_t sysmemPixelFormat_, ArrayProxyNoTemporaries const & colorSpaces_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , imageCreateInfo( imageCreateInfo_ ) , requiredFormatFeatures( requiredFormatFeatures_ ) , flags( flags_ ) , sysmemPixelFormat( sysmemPixelFormat_ ) , colorSpaceCount( static_cast( colorSpaces_.size() ) ) , pColorSpaces( colorSpaces_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageFormatConstraintsInfoFUCHSIA & operator=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageFormatConstraintsInfoFUCHSIA & operator=( VkImageFormatConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setImageCreateInfo( ImageCreateInfo const & imageCreateInfo_ ) & VULKAN_HPP_NOEXCEPT { imageCreateInfo = imageCreateInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setImageCreateInfo( ImageCreateInfo const & imageCreateInfo_ ) && VULKAN_HPP_NOEXCEPT { imageCreateInfo = imageCreateInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setRequiredFormatFeatures( 
FormatFeatureFlags requiredFormatFeatures_ ) & VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setRequiredFormatFeatures( FormatFeatureFlags requiredFormatFeatures_ ) && VULKAN_HPP_NOEXCEPT { requiredFormatFeatures = requiredFormatFeatures_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setFlags( ImageFormatConstraintsFlagsFUCHSIA flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setFlags( ImageFormatConstraintsFlagsFUCHSIA flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) & VULKAN_HPP_NOEXCEPT { sysmemPixelFormat = sysmemPixelFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setSysmemPixelFormat( uint64_t sysmemPixelFormat_ ) && VULKAN_HPP_NOEXCEPT { sysmemPixelFormat = sysmemPixelFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setColorSpaceCount( uint32_t colorSpaceCount_ ) & VULKAN_HPP_NOEXCEPT { colorSpaceCount = colorSpaceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setColorSpaceCount( uint32_t colorSpaceCount_ ) && VULKAN_HPP_NOEXCEPT { colorSpaceCount = colorSpaceCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA & setPColorSpaces( const SysmemColorSpaceFUCHSIA * pColorSpaces_ ) & VULKAN_HPP_NOEXCEPT { pColorSpaces = pColorSpaces_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatConstraintsInfoFUCHSIA && setPColorSpaces( const SysmemColorSpaceFUCHSIA * pColorSpaces_ ) && VULKAN_HPP_NOEXCEPT { pColorSpaces = pColorSpaces_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageFormatConstraintsInfoFUCHSIA & 
setColorSpaces( ArrayProxyNoTemporaries const & colorSpaces_ ) VULKAN_HPP_NOEXCEPT { colorSpaceCount = static_cast( colorSpaces_.size() ); pColorSpaces = colorSpaces_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageFormatConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageFormatConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageCreateInfo, requiredFormatFeatures, flags, sysmemPixelFormat, colorSpaceCount, pColorSpaces ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageFormatConstraintsInfoFUCHSIA const & ) const = default; # else bool operator==( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCreateInfo == rhs.imageCreateInfo ) && ( requiredFormatFeatures == rhs.requiredFormatFeatures ) && ( flags == rhs.flags ) && ( sysmemPixelFormat == rhs.sysmemPixelFormat ) && ( colorSpaceCount == rhs.colorSpaceCount ) && ( pColorSpaces == rhs.pColorSpaces ); # endif } bool operator!=( ImageFormatConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImageFormatConstraintsInfoFUCHSIA; const void * pNext = {}; ImageCreateInfo imageCreateInfo = {}; FormatFeatureFlags requiredFormatFeatures = {}; ImageFormatConstraintsFlagsFUCHSIA flags = {}; uint64_t sysmemPixelFormat = 
{}; uint32_t colorSpaceCount = {}; const SysmemColorSpaceFUCHSIA * pColorSpaces = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageFormatConstraintsInfoFUCHSIA; }; # endif template <> struct CppType { using Type = ImageFormatConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImageConstraintsInfoFUCHSIA, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageConstraintsInfoFUCHSIA.html struct ImageConstraintsInfoFUCHSIA { using NativeType = VkImageConstraintsInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageConstraintsInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( uint32_t formatConstraintsCount_ = {}, const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ = {}, BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , formatConstraintsCount{ formatConstraintsCount_ } , pFormatConstraints{ pFormatConstraints_ } , bufferCollectionConstraints{ bufferCollectionConstraints_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR ImageConstraintsInfoFUCHSIA( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageConstraintsInfoFUCHSIA( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImageConstraintsInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageConstraintsInfoFUCHSIA( ArrayProxyNoTemporaries const & formatConstraints_, BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , formatConstraintsCount( 
static_cast( formatConstraints_.size() ) ) , pFormatConstraints( formatConstraints_.data() ) , bufferCollectionConstraints( bufferCollectionConstraints_ ) , flags( flags_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageConstraintsInfoFUCHSIA & operator=( ImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageConstraintsInfoFUCHSIA & operator=( VkImageConstraintsInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) & VULKAN_HPP_NOEXCEPT { formatConstraintsCount = formatConstraintsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA && setFormatConstraintsCount( uint32_t formatConstraintsCount_ ) && VULKAN_HPP_NOEXCEPT { formatConstraintsCount = formatConstraintsCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setPFormatConstraints( const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) & VULKAN_HPP_NOEXCEPT { pFormatConstraints = pFormatConstraints_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA && setPFormatConstraints( const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints_ ) && VULKAN_HPP_NOEXCEPT { pFormatConstraints = pFormatConstraints_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageConstraintsInfoFUCHSIA & setFormatConstraints( ArrayProxyNoTemporaries const & formatConstraints_ ) VULKAN_HPP_NOEXCEPT { 
formatConstraintsCount = static_cast( formatConstraints_.size() ); pFormatConstraints = formatConstraints_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) & VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA && setBufferCollectionConstraints( BufferCollectionConstraintsInfoFUCHSIA const & bufferCollectionConstraints_ ) && VULKAN_HPP_NOEXCEPT { bufferCollectionConstraints = bufferCollectionConstraints_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA & setFlags( ImageConstraintsInfoFlagsFUCHSIA flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageConstraintsInfoFUCHSIA && setFlags( ImageConstraintsInfoFlagsFUCHSIA flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageConstraintsInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageConstraintsInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageConstraintsInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageConstraintsInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, formatConstraintsCount, pFormatConstraints, bufferCollectionConstraints, flags ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageConstraintsInfoFUCHSIA const & ) const = default; # else bool operator==( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatConstraintsCount == rhs.formatConstraintsCount ) && ( pFormatConstraints == rhs.pFormatConstraints ) && ( bufferCollectionConstraints == rhs.bufferCollectionConstraints ) && ( flags == rhs.flags ); # endif } bool operator!=( ImageConstraintsInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImageConstraintsInfoFUCHSIA; const void * pNext = {}; uint32_t formatConstraintsCount = {}; const ImageFormatConstraintsInfoFUCHSIA * pFormatConstraints = {}; BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints = {}; ImageConstraintsInfoFlagsFUCHSIA flags = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageConstraintsInfoFUCHSIA; }; # endif template <> struct CppType { using Type = ImageConstraintsInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkImageCopy, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageCopy.html struct ImageCopy { using NativeType = VkImageCopy; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCopy( ImageSubresourceLayers srcSubresource_ = {}, Offset3D srcOffset_ = {}, ImageSubresourceLayers dstSubresource_ = {}, Offset3D dstOffset_ = {}, Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource{ srcSubresource_ } , srcOffset{ srcOffset_ } , dstSubresource{ dstSubresource_ } , dstOffset{ dstOffset_ } , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageCopy( *reinterpret_cast( &rhs ) ) {} ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { 
*this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy & setSrcOffset( Offset3D const & srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy && setSrcOffset( Offset3D const & srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy & setDstOffset( Offset3D const & dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy && setDstOffset( Offset3D const & dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageCopy & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageCopy && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageCopy const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCopy &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageCopy const 
*() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageCopy *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageCopy const & ) const = default; #else bool operator==( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } bool operator!=( ImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageSubresourceLayers srcSubresource = {}; Offset3D srcOffset = {}; ImageSubresourceLayers dstSubresource = {}; Offset3D dstOffset = {}; Extent3D extent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageCopy; }; #endif // wrapper struct for struct VkImageViewCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCreateInfo.html struct ImageViewCreateInfo { using NativeType = VkImageViewCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateFlags flags_ = {}, Image image_ = {}, ImageViewType viewType_ = ImageViewType::e1D, Format format_ = Format::eUndefined, ComponentMapping components_ = {}, ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , image{ image_ } , viewType{ viewType_ 
} , format{ format_ } , components{ components_ } , subresourceRange{ subresourceRange_ } { } VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCreateInfo( *reinterpret_cast( &rhs ) ) { } ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFlags( ImageViewCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFlags( ImageViewCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setViewType( ImageViewType viewType_ ) & VULKAN_HPP_NOEXCEPT { viewType = viewType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setViewType( ImageViewType viewType_ ) && VULKAN_HPP_NOEXCEPT { viewType = viewType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setComponents( ComponentMapping const & components_ ) & VULKAN_HPP_NOEXCEPT { components = components_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setComponents( ComponentMapping const & components_ ) && VULKAN_HPP_NOEXCEPT { components = components_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, image, viewType, format, components, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewCreateInfo const & ) const = default; #else bool operator==( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( image == rhs.image ) && ( viewType == rhs.viewType ) && ( format == rhs.format ) && ( components == 
rhs.components ) && ( subresourceRange == rhs.subresourceRange ); # endif } bool operator!=( ImageViewCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewCreateInfo; const void * pNext = {}; ImageViewCreateFlags flags = {}; Image image = {}; ImageViewType viewType = ImageViewType::e1D; Format format = Format::eUndefined; ComponentMapping components = {}; ImageSubresourceRange subresourceRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewCreateInfo; }; #endif template <> struct CppType { using Type = ImageViewCreateInfo; }; // wrapper struct for struct VkImageDescriptorInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDescriptorInfoEXT.html struct ImageDescriptorInfoEXT { using NativeType = VkImageDescriptorInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDescriptorInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageDescriptorInfoEXT( const ImageViewCreateInfo * pView_ = {}, ImageLayout layout_ = ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pView{ pView_ } , layout{ layout_ } { } VULKAN_HPP_CONSTEXPR ImageDescriptorInfoEXT( ImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageDescriptorInfoEXT( VkImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageDescriptorInfoEXT( *reinterpret_cast( &rhs ) ) { } ImageDescriptorInfoEXT & operator=( ImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageDescriptorInfoEXT & operator=( VkImageDescriptorInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setPView( const ImageViewCreateInfo * pView_ ) & VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setPView( const ImageViewCreateInfo * pView_ ) && VULKAN_HPP_NOEXCEPT { pView = pView_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT & setLayout( ImageLayout layout_ ) & VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDescriptorInfoEXT && setLayout( ImageLayout layout_ ) && VULKAN_HPP_NOEXCEPT { layout = layout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageDescriptorInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDescriptorInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDescriptorInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageDescriptorInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pView, layout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageDescriptorInfoEXT const & ) const = default; #else bool operator==( ImageDescriptorInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pView == rhs.pView ) && ( layout == rhs.layout ); # endif } bool operator!=( ImageDescriptorInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageDescriptorInfoEXT; const void * pNext = {}; const ImageViewCreateInfo * pView = {}; ImageLayout layout = ImageLayout::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageDescriptorInfoEXT; }; #endif template <> struct CppType { using Type = ImageDescriptorInfoEXT; }; // wrapper struct for struct VkSubresourceLayout, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkSubresourceLayout.html struct SubresourceLayout { using NativeType = VkSubresourceLayout; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubresourceLayout( DeviceSize offset_ = {}, DeviceSize size_ = {}, DeviceSize rowPitch_ = {}, DeviceSize arrayPitch_ = {}, DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT : offset{ offset_ } , size{ size_ } , rowPitch{ rowPitch_ } , arrayPitch{ arrayPitch_ } , depthPitch{ depthPitch_ } { } VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout( *reinterpret_cast( &rhs ) ) {} SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = 
size_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setRowPitch( DeviceSize rowPitch_ ) & VULKAN_HPP_NOEXCEPT { rowPitch = rowPitch_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout && setRowPitch( DeviceSize rowPitch_ ) && VULKAN_HPP_NOEXCEPT { rowPitch = rowPitch_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setArrayPitch( DeviceSize arrayPitch_ ) & VULKAN_HPP_NOEXCEPT { arrayPitch = arrayPitch_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout && setArrayPitch( DeviceSize arrayPitch_ ) && VULKAN_HPP_NOEXCEPT { arrayPitch = arrayPitch_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & setDepthPitch( DeviceSize depthPitch_ ) & VULKAN_HPP_NOEXCEPT { depthPitch = depthPitch_; return *this; } VULKAN_HPP_CONSTEXPR_14 SubresourceLayout && setDepthPitch( DeviceSize depthPitch_ ) && VULKAN_HPP_NOEXCEPT { depthPitch = depthPitch_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkSubresourceLayout const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkSubresourceLayout const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkSubresourceLayout *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( offset, size, rowPitch, arrayPitch, depthPitch ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( SubresourceLayout const & ) const = default; #else bool operator==( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( offset == rhs.offset ) && ( size 
== rhs.size ) && ( rowPitch == rhs.rowPitch ) && ( arrayPitch == rhs.arrayPitch ) && ( depthPitch == rhs.depthPitch ); # endif } bool operator!=( SubresourceLayout const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize offset = {}; DeviceSize size = {}; DeviceSize rowPitch = {}; DeviceSize arrayPitch = {}; DeviceSize depthPitch = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = SubresourceLayout; }; #endif // wrapper struct for struct VkImageDrmFormatModifierExplicitCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html struct ImageDrmFormatModifierExplicitCreateInfoEXT { using NativeType = VkImageDrmFormatModifierExplicitCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const SubresourceLayout * pPlaneLayouts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifier{ drmFormatModifier_ } , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } , pPlaneLayouts{ pPlaneLayouts_ } { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, ArrayProxyNoTemporaries const & planeLayouts_, const 
void * pNext_ = nullptr ) : pNext( pNext_ ) , drmFormatModifier( drmFormatModifier_ ) , drmFormatModifierPlaneCount( static_cast( planeLayouts_.size() ) ) , pPlaneLayouts( planeLayouts_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) & VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT && setDrmFormatModifier( uint64_t drmFormatModifier_ ) && VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) & VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT && setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) && VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const SubresourceLayout * pPlaneLayouts_ ) & VULKAN_HPP_NOEXCEPT { pPlaneLayouts = pPlaneLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT && setPPlaneLayouts( const SubresourceLayout * pPlaneLayouts_ ) && VULKAN_HPP_NOEXCEPT { pPlaneLayouts = pPlaneLayouts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( ArrayProxyNoTemporaries const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifierPlaneCount = static_cast( planeLayouts_.size() ); pPlaneLayouts = planeLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageDrmFormatModifierExplicitCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierExplicitCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageDrmFormatModifierExplicitCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifier, drmFormatModifierPlaneCount, pPlaneLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const & ) const = default; #else bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) && ( pPlaneLayouts == 
rhs.pPlaneLayouts ); # endif } bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT; const void * pNext = {}; uint64_t drmFormatModifier = {}; uint32_t drmFormatModifierPlaneCount = {}; const SubresourceLayout * pPlaneLayouts = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; }; #endif template <> struct CppType { using Type = ImageDrmFormatModifierExplicitCreateInfoEXT; }; // wrapper struct for struct VkImageDrmFormatModifierListCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html struct ImageDrmFormatModifierListCreateInfoEXT { using NativeType = VkImageDrmFormatModifierListCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifierCount{ drmFormatModifierCount_ } , pDrmFormatModifiers{ pDrmFormatModifiers_ } { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageDrmFormatModifierListCreateInfoEXT( ArrayProxyNoTemporaries const & drmFormatModifiers_, const void * 
pNext_ = nullptr ) : pNext( pNext_ ), drmFormatModifierCount( static_cast( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) & VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = drmFormatModifierCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT && setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) && VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = drmFormatModifierCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) & VULKAN_HPP_NOEXCEPT { pDrmFormatModifiers = pDrmFormatModifiers_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT && setPDrmFormatModifiers( const uint64_t * pDrmFormatModifiers_ ) && VULKAN_HPP_NOEXCEPT { pDrmFormatModifiers = pDrmFormatModifiers_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( 
ArrayProxyNoTemporaries const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT { drmFormatModifierCount = static_cast( drmFormatModifiers_.size() ); pDrmFormatModifiers = drmFormatModifiers_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageDrmFormatModifierListCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierListCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageDrmFormatModifierListCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifierCount, pDrmFormatModifiers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const & ) const = default; #else bool operator==( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); # endif } bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; const void * pNext = {}; uint32_t drmFormatModifierCount = {}; const uint64_t * pDrmFormatModifiers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageDrmFormatModifierListCreateInfoEXT; }; #endif template <> struct CppType { using Type = ImageDrmFormatModifierListCreateInfoEXT; }; // wrapper 
struct for struct VkImageDrmFormatModifierPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageDrmFormatModifierPropertiesEXT.html struct ImageDrmFormatModifierPropertiesEXT { using NativeType = VkImageDrmFormatModifierPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifier{ drmFormatModifier_ } { } VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast( &rhs ) ) { } ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkImageDrmFormatModifierPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageDrmFormatModifierPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageDrmFormatModifierPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifier ); } #endif #if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageDrmFormatModifierPropertiesEXT const & ) const = default; #else bool operator==( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ); # endif } bool operator!=( ImageDrmFormatModifierPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT; void * pNext = {}; uint64_t drmFormatModifier = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageDrmFormatModifierPropertiesEXT; }; #endif template <> struct CppType { using Type = ImageDrmFormatModifierPropertiesEXT; }; // wrapper struct for struct VkImageFormatListCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatListCreateInfo.html struct ImageFormatListCreateInfo { using NativeType = VkImageFormatListCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, const Format * pViewFormats_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , viewFormatCount{ viewFormatCount_ } , pViewFormats{ pViewFormats_ } { } VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageFormatListCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageFormatListCreateInfo( 
ArrayProxyNoTemporaries const & viewFormats_, const void * pNext_ = nullptr ) : pNext( pNext_ ), viewFormatCount( static_cast( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) & VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo && setViewFormatCount( uint32_t viewFormatCount_ ) && VULKAN_HPP_NOEXCEPT { viewFormatCount = viewFormatCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & setPViewFormats( const Format * pViewFormats_ ) & VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo && setPViewFormats( const Format * pViewFormats_ ) && VULKAN_HPP_NOEXCEPT { pViewFormats = pViewFormats_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) ImageFormatListCreateInfo & setViewFormats( ArrayProxyNoTemporaries const & viewFormats_ ) VULKAN_HPP_NOEXCEPT { viewFormatCount = static_cast( viewFormats_.size() ); pViewFormats = viewFormats_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkImageFormatListCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatListCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageFormatListCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, viewFormatCount, pViewFormats ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageFormatListCreateInfo const & ) const = default; #else bool operator==( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( viewFormatCount == rhs.viewFormatCount ) && ( pViewFormats == rhs.pViewFormats ); # endif } bool operator!=( ImageFormatListCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageFormatListCreateInfo; const void * pNext = {}; uint32_t viewFormatCount = {}; const Format * pViewFormats = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageFormatListCreateInfo; }; #endif template <> struct CppType { using Type = ImageFormatListCreateInfo; }; using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo; // wrapper struct for struct VkImageFormatProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageFormatProperties2.html struct ImageFormatProperties2 { using NativeType = VkImageFormatProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties imageFormatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageFormatProperties{ imageFormatProperties_ } { } VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageFormatProperties2( *reinterpret_cast( &rhs ) ) { } ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkImageFormatProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageFormatProperties2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageFormatProperties2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageFormatProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageFormatProperties2 const & ) const = default; #else bool operator==( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFormatProperties == rhs.imageFormatProperties ); # endif } bool operator!=( ImageFormatProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageFormatProperties2; void * pNext = {}; ImageFormatProperties 
imageFormatProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageFormatProperties2; }; #endif template <> struct CppType { using Type = ImageFormatProperties2; }; using ImageFormatProperties2KHR = ImageFormatProperties2; // wrapper struct for struct VkImageMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryBarrier.html struct ImageMemoryBarrier { using NativeType = VkImageMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( AccessFlags srcAccessMask_ = {}, AccessFlags dstAccessMask_ = {}, ImageLayout oldLayout_ = ImageLayout::eUndefined, ImageLayout newLayout_ = ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, Image image_ = {}, ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcAccessMask{ srcAccessMask_ } , dstAccessMask{ dstAccessMask_ } , oldLayout{ oldLayout_ } , newLayout{ newLayout_ } , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } , image{ image_ } , subresourceRange{ subresourceRange_ } { } VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryBarrier( *reinterpret_cast( &rhs ) ) {} ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setSrcAccessMask( AccessFlags srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setDstAccessMask( AccessFlags dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setOldLayout( ImageLayout oldLayout_ ) & VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setOldLayout( ImageLayout oldLayout_ ) && VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setNewLayout( ImageLayout newLayout_ ) & VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setNewLayout( ImageLayout newLayout_ ) && VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { srcQueueFamilyIndex = srcQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { dstQueueFamilyIndex = dstQueueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) & VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier && setSubresourceRange( ImageSubresourceRange const & subresourceRange_ ) && VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageMemoryBarrier *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcAccessMask, dstAccessMask, oldLayout, newLayout, srcQueueFamilyIndex, dstQueueFamilyIndex, image, subresourceRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageMemoryBarrier const & ) const = default; #else bool operator==( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ) && ( oldLayout == rhs.oldLayout ) && ( newLayout == rhs.newLayout ) && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex ) && ( image == rhs.image ) && ( subresourceRange == rhs.subresourceRange ); # endif } bool operator!=( ImageMemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageMemoryBarrier; const void * pNext = {}; AccessFlags srcAccessMask = {}; AccessFlags dstAccessMask = {}; ImageLayout oldLayout = ImageLayout::eUndefined; ImageLayout newLayout = ImageLayout::eUndefined; uint32_t srcQueueFamilyIndex = {}; uint32_t dstQueueFamilyIndex = {}; Image image = {}; ImageSubresourceRange subresourceRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageMemoryBarrier; }; #endif template <> struct CppType { using Type = ImageMemoryBarrier; }; // wrapper struct for struct VkImageMemoryRequirementsInfo2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageMemoryRequirementsInfo2.html struct ImageMemoryRequirementsInfo2 { using NativeType = VkImageMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } { } VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) 
) { } ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageMemoryRequirementsInfo2 const & ) const = default; #else bool operator==( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } bool 
operator!=( ImageMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageMemoryRequirementsInfo2; const void * pNext = {}; Image image = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageMemoryRequirementsInfo2; }; #endif template <> struct CppType { using Type = ImageMemoryRequirementsInfo2; }; using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImagePipeSurfaceCreateInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html struct ImagePipeSurfaceCreateInfoFUCHSIA { using NativeType = VkImagePipeSurfaceCreateInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , imagePipeHandle{ imagePipeHandle_ } { } VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA && setFlags( ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) & VULKAN_HPP_NOEXCEPT { imagePipeHandle = imagePipeHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA && setImagePipeHandle( zx_handle_t imagePipeHandle_ ) && VULKAN_HPP_NOEXCEPT { imagePipeHandle = imagePipeHandle_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImagePipeSurfaceCreateInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImagePipeSurfaceCreateInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImagePipeSurfaceCreateInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, imagePipeHandle ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = 
pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 ); } bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA; const void * pNext = {}; ImagePipeSurfaceCreateFlagsFUCHSIA flags = {}; zx_handle_t imagePipeHandle = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImagePipeSurfaceCreateInfoFUCHSIA; }; # endif template <> struct CppType { using Type = ImagePipeSurfaceCreateInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkImagePlaneMemoryRequirementsInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImagePlaneMemoryRequirementsInfo.html struct ImagePlaneMemoryRequirementsInfo { using NativeType = VkImagePlaneMemoryRequirementsInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImageAspectFlagBits planeAspect_ = ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , planeAspect{ planeAspect_ } { } VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImagePlaneMemoryRequirementsInfo( *reinterpret_cast( &rhs ) ) { } ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & setPlaneAspect( ImageAspectFlagBits planeAspect_ ) & VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo && setPlaneAspect( ImageAspectFlagBits planeAspect_ ) && VULKAN_HPP_NOEXCEPT { planeAspect = planeAspect_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImagePlaneMemoryRequirementsInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImagePlaneMemoryRequirementsInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImagePlaneMemoryRequirementsInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, planeAspect ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
ImagePlaneMemoryRequirementsInfo const & ) const = default; #else bool operator==( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( planeAspect == rhs.planeAspect ); # endif } bool operator!=( ImagePlaneMemoryRequirementsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo; const void * pNext = {}; ImageAspectFlagBits planeAspect = ImageAspectFlagBits::eColor; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImagePlaneMemoryRequirementsInfo; }; #endif template <> struct CppType { using Type = ImagePlaneMemoryRequirementsInfo; }; using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; // wrapper struct for struct VkImageResolve, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve.html struct ImageResolve { using NativeType = VkImageResolve; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageResolve( ImageSubresourceLayers srcSubresource_ = {}, Offset3D srcOffset_ = {}, ImageSubresourceLayers dstSubresource_ = {}, Offset3D dstOffset_ = {}, Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT : srcSubresource{ srcSubresource_ } , srcOffset{ srcOffset_ } , dstSubresource{ dstSubresource_ } , dstOffset{ dstOffset_ } , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve( *reinterpret_cast( &rhs ) ) {} ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( 
&rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve & setSrcOffset( Offset3D const & srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve && setSrcOffset( Offset3D const & srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve & setDstOffset( Offset3D const & dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve && setDstOffset( Offset3D const & dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageResolve const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageResolve &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkImageResolve const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageResolve *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageResolve const & ) const = default; #else bool operator==( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } bool operator!=( ImageResolve const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ImageSubresourceLayers srcSubresource = {}; Offset3D srcOffset = {}; ImageSubresourceLayers dstSubresource = {}; Offset3D dstOffset = {}; Extent3D extent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageResolve; }; #endif // wrapper struct for struct VkImageResolve2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageResolve2.html struct ImageResolve2 { using NativeType = VkImageResolve2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageResolve2( ImageSubresourceLayers srcSubresource_ = {}, Offset3D srcOffset_ = {}, ImageSubresourceLayers dstSubresource_ = {}, Offset3D dstOffset_ = {}, Extent3D extent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcSubresource{ srcSubresource_ } , srcOffset{ srcOffset_ } , dstSubresource{ dstSubresource_ } , 
dstOffset{ dstOffset_ } , extent{ extent_ } { } VULKAN_HPP_CONSTEXPR ImageResolve2( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageResolve2( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageResolve2( *reinterpret_cast( &rhs ) ) {} ImageResolve2 & operator=( ImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageResolve2 & operator=( VkImageResolve2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) & VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setSrcSubresource( ImageSubresourceLayers const & srcSubresource_ ) && VULKAN_HPP_NOEXCEPT { srcSubresource = srcSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setSrcOffset( Offset3D const & srcOffset_ ) & VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setSrcOffset( Offset3D const & srcOffset_ ) && VULKAN_HPP_NOEXCEPT { srcOffset = srcOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) & VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setDstSubresource( ImageSubresourceLayers const & dstSubresource_ ) && VULKAN_HPP_NOEXCEPT { dstSubresource = dstSubresource_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setDstOffset( Offset3D 
const & dstOffset_ ) & VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setDstOffset( Offset3D const & dstOffset_ ) && VULKAN_HPP_NOEXCEPT { dstOffset = dstOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 & setExtent( Extent3D const & extent_ ) & VULKAN_HPP_NOEXCEPT { extent = extent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageResolve2 && setExtent( Extent3D const & extent_ ) && VULKAN_HPP_NOEXCEPT { extent = extent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageResolve2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageResolve2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageResolve2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageResolve2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcSubresource, srcOffset, dstSubresource, dstOffset, extent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageResolve2 const & ) const = default; #else bool operator==( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcSubresource == rhs.srcSubresource ) && ( srcOffset == rhs.srcOffset ) && ( dstSubresource == rhs.dstSubresource ) && ( dstOffset == rhs.dstOffset ) && ( extent == rhs.extent ); # endif } bool operator!=( ImageResolve2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageResolve2; const void * pNext = {}; ImageSubresourceLayers srcSubresource = {}; Offset3D srcOffset = {}; ImageSubresourceLayers dstSubresource = {}; Offset3D dstOffset = {}; Extent3D 
extent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageResolve2; }; #endif template <> struct CppType { using Type = ImageResolve2; }; using ImageResolve2KHR = ImageResolve2; // wrapper struct for struct VkImageSparseMemoryRequirementsInfo2, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSparseMemoryRequirementsInfo2.html struct ImageSparseMemoryRequirementsInfo2 { using NativeType = VkImageSparseMemoryRequirementsInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } { } VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast( &rhs ) ) { } ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
ImageSparseMemoryRequirementsInfo2 & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSparseMemoryRequirementsInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSparseMemoryRequirementsInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSparseMemoryRequirementsInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageSparseMemoryRequirementsInfo2 const & ) const = default; #else bool operator==( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ); # endif } bool operator!=( ImageSparseMemoryRequirementsInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2; const void * pNext = {}; Image image = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSparseMemoryRequirementsInfo2; }; #endif template <> struct CppType { using Type = ImageSparseMemoryRequirementsInfo2; }; using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; // wrapper struct for struct VkImageStencilUsageCreateInfo, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageStencilUsageCreateInfo.html struct ImageStencilUsageCreateInfo { using NativeType = VkImageStencilUsageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageUsageFlags stencilUsage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stencilUsage{ stencilUsage_ } { } VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : ImageStencilUsageCreateInfo( *reinterpret_cast( &rhs ) ) { } ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & setStencilUsage( ImageUsageFlags stencilUsage_ ) & VULKAN_HPP_NOEXCEPT { stencilUsage = stencilUsage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo && setStencilUsage( ImageUsageFlags stencilUsage_ ) && VULKAN_HPP_NOEXCEPT { stencilUsage = stencilUsage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkImageStencilUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageStencilUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageStencilUsageCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, stencilUsage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageStencilUsageCreateInfo const & ) const = default; #else bool operator==( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stencilUsage == rhs.stencilUsage ); # endif } bool operator!=( ImageStencilUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageStencilUsageCreateInfo; const void * pNext = {}; ImageUsageFlags stencilUsage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageStencilUsageCreateInfo; }; #endif template <> struct CppType { using Type = ImageStencilUsageCreateInfo; }; using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo; // wrapper struct for struct VkImageSwapchainCreateInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageSwapchainCreateInfoKHR.html struct ImageSwapchainCreateInfoKHR { using NativeType = VkImageSwapchainCreateInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
ImageSwapchainCreateInfoKHR( SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , swapchain{ swapchain_ } { } VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSwapchainCreateInfoKHR( *reinterpret_cast( &rhs ) ) { } ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & setSwapchain( SwapchainKHR swapchain_ ) & VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR && setSwapchain( SwapchainKHR swapchain_ ) && VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageSwapchainCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageSwapchainCreateInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageSwapchainCreateInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const 
VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, swapchain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageSwapchainCreateInfoKHR const & ) const = default; #else bool operator==( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( swapchain == rhs.swapchain ); # endif } bool operator!=( ImageSwapchainCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageSwapchainCreateInfoKHR; const void * pNext = {}; SwapchainKHR swapchain = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageSwapchainCreateInfoKHR; }; #endif template <> struct CppType { using Type = ImageSwapchainCreateInfoKHR; }; // wrapper struct for struct VkImageViewASTCDecodeModeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewASTCDecodeModeEXT.html struct ImageViewASTCDecodeModeEXT { using NativeType = VkImageViewASTCDecodeModeEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( Format decodeMode_ = Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , decodeMode{ decodeMode_ } { } VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewASTCDecodeModeEXT( *reinterpret_cast( &rhs ) ) { } ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & setDecodeMode( Format decodeMode_ ) & VULKAN_HPP_NOEXCEPT { decodeMode = decodeMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT && setDecodeMode( Format decodeMode_ ) && VULKAN_HPP_NOEXCEPT { decodeMode = decodeMode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewASTCDecodeModeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewASTCDecodeModeEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewASTCDecodeModeEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, decodeMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewASTCDecodeModeEXT const & ) const = default; #else bool operator==( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeMode == rhs.decodeMode ); # endif } bool operator!=( ImageViewASTCDecodeModeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs 
); } #endif public: StructureType sType = StructureType::eImageViewAstcDecodeModeEXT; const void * pNext = {}; Format decodeMode = Format::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewASTCDecodeModeEXT; }; #endif template <> struct CppType { using Type = ImageViewASTCDecodeModeEXT; }; // wrapper struct for struct VkImageViewAddressPropertiesNVX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewAddressPropertiesNVX.html struct ImageViewAddressPropertiesNVX { using NativeType = VkImageViewAddressPropertiesNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( DeviceAddress deviceAddress_ = {}, DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceAddress{ deviceAddress_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewAddressPropertiesNVX( *reinterpret_cast( &rhs ) ) { } ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkImageViewAddressPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewAddressPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( 
this ); } operator VkImageViewAddressPropertiesNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceAddress, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewAddressPropertiesNVX const & ) const = default; #else bool operator==( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceAddress == rhs.deviceAddress ) && ( size == rhs.size ); # endif } bool operator!=( ImageViewAddressPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewAddressPropertiesNVX; void * pNext = {}; DeviceAddress deviceAddress = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewAddressPropertiesNVX; }; #endif template <> struct CppType { using Type = ImageViewAddressPropertiesNVX; }; // wrapper struct for struct VkImageViewCaptureDescriptorDataInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewCaptureDescriptorDataInfoEXT.html struct ImageViewCaptureDescriptorDataInfoEXT { using NativeType = VkImageViewCaptureDescriptorDataInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( ImageView imageView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageView{ imageView_ } { } VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( 
ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewCaptureDescriptorDataInfoEXT( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewCaptureDescriptorDataInfoEXT( *reinterpret_cast( &rhs ) ) { } ImageViewCaptureDescriptorDataInfoEXT & operator=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewCaptureDescriptorDataInfoEXT & operator=( VkImageViewCaptureDescriptorDataInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT & setImageView( ImageView imageView_ ) & VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewCaptureDescriptorDataInfoEXT && setImageView( ImageView imageView_ ) && VULKAN_HPP_NOEXCEPT { imageView = imageView_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewCaptureDescriptorDataInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewCaptureDescriptorDataInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewCaptureDescriptorDataInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewCaptureDescriptorDataInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageView ); } 
#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewCaptureDescriptorDataInfoEXT const & ) const = default; #else bool operator==( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ); # endif } bool operator!=( ImageViewCaptureDescriptorDataInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewCaptureDescriptorDataInfoEXT; const void * pNext = {}; ImageView imageView = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewCaptureDescriptorDataInfoEXT; }; #endif template <> struct CppType { using Type = ImageViewCaptureDescriptorDataInfoEXT; }; // wrapper struct for struct VkImageViewHandleInfoNVX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewHandleInfoNVX.html struct ImageViewHandleInfoNVX { using NativeType = VkImageViewHandleInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageView imageView_ = {}, DescriptorType descriptorType_ = DescriptorType::eSampler, Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageView{ imageView_ } , descriptorType{ descriptorType_ } , sampler{ sampler_ } { } VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewHandleInfoNVX( *reinterpret_cast( &rhs ) ) { } ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setImageView( ImageView imageView_ ) & VULKAN_HPP_NOEXCEPT { imageView = imageView_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX && setImageView( ImageView imageView_ ) && VULKAN_HPP_NOEXCEPT { imageView = imageView_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setDescriptorType( DescriptorType descriptorType_ ) & VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX && setDescriptorType( DescriptorType descriptorType_ ) && VULKAN_HPP_NOEXCEPT { descriptorType = descriptorType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & setSampler( Sampler sampler_ ) & VULKAN_HPP_NOEXCEPT { sampler = sampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX && setSampler( Sampler sampler_ ) && VULKAN_HPP_NOEXCEPT { sampler = sampler_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewHandleInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewHandleInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewHandleInfoNVX *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageView, descriptorType, sampler ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewHandleInfoNVX const & ) const = default; #else bool operator==( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageView == rhs.imageView ) && ( descriptorType == rhs.descriptorType ) && ( sampler == rhs.sampler ); # endif } bool operator!=( ImageViewHandleInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewHandleInfoNVX; const void * pNext = {}; ImageView imageView = {}; DescriptorType descriptorType = DescriptorType::eSampler; Sampler sampler = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewHandleInfoNVX; }; #endif template <> struct CppType { using Type = ImageViewHandleInfoNVX; }; // wrapper struct for struct VkImageViewMinLodCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewMinLodCreateInfoEXT.html struct ImageViewMinLodCreateInfoEXT { using NativeType = VkImageViewMinLodCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewMinLodCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minLod{ minLod_ } { } VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewMinLodCreateInfoEXT( 
VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewMinLodCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } ImageViewMinLodCreateInfoEXT & operator=( ImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewMinLodCreateInfoEXT & operator=( VkImageViewMinLodCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT & setMinLod( float minLod_ ) & VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewMinLodCreateInfoEXT && setMinLod( float minLod_ ) && VULKAN_HPP_NOEXCEPT { minLod = minLod_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewMinLodCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewMinLodCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewMinLodCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewMinLodCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minLod ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewMinLodCreateInfoEXT const & ) const = default; #else bool operator==( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() 
== rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); # endif } bool operator!=( ImageViewMinLodCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewMinLodCreateInfoEXT; const void * pNext = {}; float minLod = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewMinLodCreateInfoEXT; }; #endif template <> struct CppType { using Type = ImageViewMinLodCreateInfoEXT; }; // wrapper struct for struct VkImageViewSampleWeightCreateInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSampleWeightCreateInfoQCOM.html struct ImageViewSampleWeightCreateInfoQCOM { using NativeType = VkImageViewSampleWeightCreateInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSampleWeightCreateInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( Offset2D filterCenter_ = {}, Extent2D filterSize_ = {}, uint32_t numPhases_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , filterCenter{ filterCenter_ } , filterSize{ filterSize_ } , numPhases{ numPhases_ } { } VULKAN_HPP_CONSTEXPR ImageViewSampleWeightCreateInfoQCOM( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewSampleWeightCreateInfoQCOM( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewSampleWeightCreateInfoQCOM( *reinterpret_cast( &rhs ) ) { } ImageViewSampleWeightCreateInfoQCOM & operator=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewSampleWeightCreateInfoQCOM & operator=( VkImageViewSampleWeightCreateInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { 
*this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterCenter( Offset2D const & filterCenter_ ) & VULKAN_HPP_NOEXCEPT { filterCenter = filterCenter_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM && setFilterCenter( Offset2D const & filterCenter_ ) && VULKAN_HPP_NOEXCEPT { filterCenter = filterCenter_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setFilterSize( Extent2D const & filterSize_ ) & VULKAN_HPP_NOEXCEPT { filterSize = filterSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM && setFilterSize( Extent2D const & filterSize_ ) && VULKAN_HPP_NOEXCEPT { filterSize = filterSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM & setNumPhases( uint32_t numPhases_ ) & VULKAN_HPP_NOEXCEPT { numPhases = numPhases_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSampleWeightCreateInfoQCOM && setNumPhases( uint32_t numPhases_ ) && VULKAN_HPP_NOEXCEPT { numPhases = numPhases_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewSampleWeightCreateInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewSampleWeightCreateInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewSampleWeightCreateInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewSampleWeightCreateInfoQCOM *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, filterCenter, filterSize, numPhases ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewSampleWeightCreateInfoQCOM const & ) const = default; #else bool operator==( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( filterCenter == rhs.filterCenter ) && ( filterSize == rhs.filterSize ) && ( numPhases == rhs.numPhases ); # endif } bool operator!=( ImageViewSampleWeightCreateInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewSampleWeightCreateInfoQCOM; const void * pNext = {}; Offset2D filterCenter = {}; Extent2D filterSize = {}; uint32_t numPhases = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewSampleWeightCreateInfoQCOM; }; #endif template <> struct CppType { using Type = ImageViewSampleWeightCreateInfoQCOM; }; // wrapper struct for struct VkImageViewSlicedCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewSlicedCreateInfoEXT.html struct ImageViewSlicedCreateInfoEXT { using NativeType = VkImageViewSlicedCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewSlicedCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , sliceOffset{ sliceOffset_ } , sliceCount{ sliceCount_ } { } VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( 
ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewSlicedCreateInfoEXT( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImageViewSlicedCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } ImageViewSlicedCreateInfoEXT & operator=( ImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewSlicedCreateInfoEXT & operator=( VkImageViewSlicedCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceOffset( uint32_t sliceOffset_ ) & VULKAN_HPP_NOEXCEPT { sliceOffset = sliceOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT && setSliceOffset( uint32_t sliceOffset_ ) && VULKAN_HPP_NOEXCEPT { sliceOffset = sliceOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT & setSliceCount( uint32_t sliceCount_ ) & VULKAN_HPP_NOEXCEPT { sliceCount = sliceCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewSlicedCreateInfoEXT && setSliceCount( uint32_t sliceCount_ ) && VULKAN_HPP_NOEXCEPT { sliceCount = sliceCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewSlicedCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewSlicedCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewSlicedCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkImageViewSlicedCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, sliceOffset, sliceCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewSlicedCreateInfoEXT const & ) const = default; #else bool operator==( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sliceOffset == rhs.sliceOffset ) && ( sliceCount == rhs.sliceCount ); # endif } bool operator!=( ImageViewSlicedCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewSlicedCreateInfoEXT; const void * pNext = {}; uint32_t sliceOffset = {}; uint32_t sliceCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewSlicedCreateInfoEXT; }; #endif template <> struct CppType { using Type = ImageViewSlicedCreateInfoEXT; }; // wrapper struct for struct VkImageViewUsageCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImageViewUsageCreateInfo.html struct ImageViewUsageCreateInfo { using NativeType = VkImageViewUsageCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , usage{ usage_ } { } VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT : ImageViewUsageCreateInfo( *reinterpret_cast( &rhs ) ) { } ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & setUsage( ImageUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo && setUsage( ImageUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImageViewUsageCreateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImageViewUsageCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImageViewUsageCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, usage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImageViewUsageCreateInfo const & ) const = default; #else bool operator==( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage 
== rhs.usage ); # endif } bool operator!=( ImageViewUsageCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImageViewUsageCreateInfo; const void * pNext = {}; ImageUsageFlags usage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImageViewUsageCreateInfo; }; #endif template <> struct CppType { using Type = ImageViewUsageCreateInfo; }; using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper struct for struct VkImportAndroidHardwareBufferInfoANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportAndroidHardwareBufferInfoANDROID.html struct ImportAndroidHardwareBufferInfoANDROID { using NativeType = VkImportAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) { } ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer * buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID && setBuffer( struct AHardwareBuffer * buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const & ) const = default; # else bool operator==( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = 
StructureType::eImportAndroidHardwareBufferInfoANDROID; const void * pNext = {}; struct AHardwareBuffer * buffer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportAndroidHardwareBufferInfoANDROID; }; # endif template <> struct CppType { using Type = ImportAndroidHardwareBufferInfoANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ // wrapper struct for struct VkImportFenceFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceFdInfoKHR.html struct ImportFenceFdInfoKHR { using NativeType = VkImportFenceFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( Fence fence_ = {}, FenceImportFlags flags_ = {}, ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fence{ fence_ } , flags{ flags_ } , handleType{ handleType_ } , fd{ fd_ } { } VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImportFenceFdInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR && setPNext( const void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFence( Fence fence_ ) & VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR && setFence( Fence fence_ ) && VULKAN_HPP_NOEXCEPT { fence = fence_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFlags( FenceImportFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR && setFlags( FenceImportFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR && setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & setFd( int fd_ ) & VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR && setFd( int fd_ ) && VULKAN_HPP_NOEXCEPT { fd = fd_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportFenceFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportFenceFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportFenceFdInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fence, flags, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportFenceFdInfoKHR const & ) const = default; #else bool operator==( ImportFenceFdInfoKHR const & rhs ) 
// NOTE(review): auto-generated Vulkan-Hpp wrappers (tail of ImportFenceFdInfoKHR, then ImportFenceWin32HandleInfoKHR,
// ImportMemoryBufferCollectionFUCHSIA and ImportMemoryFdInfoKHR, each followed by its CppType specializations).
// In this copy of the file every template-argument list ("<...>") has been stripped (e.g. "reinterpret_cast( &rhs )",
// "std::tuple reflect()", "template <> struct CppType {") and the original line breaks were collapsed, so the
// single-line "//" wrapper comments below swallow the code that follows them on the same physical line.
// Kept byte-identical: this header must be regenerated from the Khronos Vulkan XML registry, not hand-patched.
const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); # endif } bool operator!=( ImportFenceFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImportFenceFdInfoKHR; const void * pNext = {}; Fence fence = {}; FenceImportFlags flags = {}; ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportFenceFdInfoKHR; }; #endif template <> struct CppType { using Type = ImportFenceFdInfoKHR; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkImportFenceWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportFenceWin32HandleInfoKHR.html struct ImportFenceWin32HandleInfoKHR { using NativeType = VkImportFenceWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( Fence fence_ = {}, FenceImportFlags flags_ = {}, ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fence{ fence_ } , flags{ flags_ } , handleType{ handleType_ } , handle{ handle_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
// ImportFenceWin32HandleInfoKHR continued: VkX-copy ctor, assignment operators, fluent setters (lvalue/rvalue pairs).
: ImportFenceWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFence( Fence fence_ ) & VULKAN_HPP_NOEXCEPT { fence = fence_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setFence( Fence fence_ ) && VULKAN_HPP_NOEXCEPT { fence = fence_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setFlags( FenceImportFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setFlags( FenceImportFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setHandle( HANDLE handle_ ) &&
VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportFenceWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportFenceWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportFenceWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fence, flags, handleType, handle, name ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportFenceWin32HandleInfoKHR const & ) const = default; # else bool operator==( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fence == rhs.fence ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); # endif } bool operator!=( ImportFenceWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; const void * pNext = {}; Fence fence = {}; FenceImportFlags flags = {}; ExternalFenceHandleTypeFlagBits handleType = ExternalFenceHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type =
// End of ImportFenceWin32HandleInfoKHR; next: ImportMemoryBufferCollectionFUCHSIA (FUCHSIA platform guard).
ImportFenceWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ImportFenceWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImportMemoryBufferCollectionFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryBufferCollectionFUCHSIA.html struct ImportMemoryBufferCollectionFUCHSIA { using NativeType = VkImportMemoryBufferCollectionFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryBufferCollectionFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , collection{ collection_ } , index{ index_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryBufferCollectionFUCHSIA( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryBufferCollectionFUCHSIA( *reinterpret_cast( &rhs ) ) { } ImportMemoryBufferCollectionFUCHSIA & operator=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryBufferCollectionFUCHSIA & operator=( VkImportMemoryBufferCollectionFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext =
pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setCollection( BufferCollectionFUCHSIA collection_ ) & VULKAN_HPP_NOEXCEPT { collection = collection_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA && setCollection( BufferCollectionFUCHSIA collection_ ) && VULKAN_HPP_NOEXCEPT { collection = collection_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA & setIndex( uint32_t index_ ) & VULKAN_HPP_NOEXCEPT { index = index_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryBufferCollectionFUCHSIA && setIndex( uint32_t index_ ) && VULKAN_HPP_NOEXCEPT { index = index_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryBufferCollectionFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryBufferCollectionFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryBufferCollectionFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryBufferCollectionFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, collection, index ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryBufferCollectionFUCHSIA const & ) const = default; # else bool operator==( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( collection == rhs.collection ) && ( index == rhs.index ); # endif } bool operator!=( ImportMemoryBufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType =
// End of ImportMemoryBufferCollectionFUCHSIA; next: ImportMemoryFdInfoKHR (non-platform-guarded).
StructureType::eImportMemoryBufferCollectionFUCHSIA; const void * pNext = {}; BufferCollectionFUCHSIA collection = {}; uint32_t index = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryBufferCollectionFUCHSIA; }; # endif template <> struct CppType { using Type = ImportMemoryBufferCollectionFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkImportMemoryFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryFdInfoKHR.html struct ImportMemoryFdInfoKHR { using NativeType = VkImportMemoryFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , fd{ fd_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryFdInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & setFd( int fd_ ) & VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR && setFd( int fd_ ) && VULKAN_HPP_NOEXCEPT { fd = fd_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryFdInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryFdInfoKHR const & ) const = default; #else bool operator==( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); # endif } bool operator!=( ImportMemoryFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImportMemoryFdInfoKHR; const void * pNext = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct
// NOTE(review): auto-generated Vulkan-Hpp wrappers (CppType tail of ImportMemoryFdInfoKHR, then
// ImportMemoryHostPointerInfoEXT, ImportMemoryMetalHandleInfoEXT, ImportMemoryWin32HandleInfoKHR, and the head of
// ImportMemoryWin32HandleInfoNV). As elsewhere in this copy, every template-argument list ("<...>") has been
// stripped and line breaks collapsed, so "//" wrapper comments swallow trailing code on the same physical line.
// Kept byte-identical; regenerate this header from the Khronos Vulkan XML registry instead of hand-patching.
CppType { using Type = ImportMemoryFdInfoKHR; }; #endif template <> struct CppType { using Type = ImportMemoryFdInfoKHR; }; // wrapper struct for struct VkImportMemoryHostPointerInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryHostPointerInfoEXT.html struct ImportMemoryHostPointerInfoEXT { using NativeType = VkImportMemoryHostPointerInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * pHostPointer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , pHostPointer{ pHostPointer_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryHostPointerInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT &
setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & setPHostPointer( void * pHostPointer_ ) & VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT && setPHostPointer( void * pHostPointer_ ) && VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryHostPointerInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryHostPointerInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryHostPointerInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, pHostPointer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryHostPointerInfoEXT const & ) const = default; #else bool operator==( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( pHostPointer == rhs.pHostPointer ); # endif } bool operator!=( ImportMemoryHostPointerInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; const void * pNext = {};
// End of ImportMemoryHostPointerInfoEXT members; next: ImportMemoryMetalHandleInfoEXT (METAL platform guard).
ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; void * pHostPointer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryHostPointerInfoEXT; }; #endif template <> struct CppType { using Type = ImportMemoryHostPointerInfoEXT; }; #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkImportMemoryMetalHandleInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryMetalHandleInfoEXT.html struct ImportMemoryMetalHandleInfoEXT { using NativeType = VkImportMemoryMetalHandleInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryMetalHandleInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , handle{ handle_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryMetalHandleInfoEXT( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryMetalHandleInfoEXT( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMemoryMetalHandleInfoEXT & operator=( ImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryMetalHandleInfoEXT & operator=( VkImportMemoryMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }
VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT & setHandle( void * handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryMetalHandleInfoEXT && setHandle( void * handle_ ) && VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, handle ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryMetalHandleInfoEXT const & ) const = default; # else bool operator==( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); # endif } bool operator!=( ImportMemoryMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT {
return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMemoryMetalHandleInfoEXT; const void * pNext = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; void * handle = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryMetalHandleInfoEXT; }; # endif template <> struct CppType { using Type = ImportMemoryMetalHandleInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkImportMemoryWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoKHR.html struct ImportMemoryWin32HandleInfoKHR { using NativeType = VkImportMemoryWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , handle{ handle_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined(
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR && setHandle( HANDLE handle_ ) && VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, handle, name ); } # endif # if defined(
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryWin32HandleInfoKHR const & ) const = default; # else bool operator==( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); # endif } bool operator!=( ImportMemoryWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; const void * pNext = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ImportMemoryWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkImportMemoryWin32HandleInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryWin32HandleInfoNV.html struct ImportMemoryWin32HandleInfoNV { using NativeType = VkImportMemoryWin32HandleInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , handle{ handle_ } { } VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryWin32HandleInfoNV(
// ImportMemoryWin32HandleInfoNV continued: VkX-copy ctor, assignment operators, setters, conversions.
VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryWin32HandleInfoNV( *reinterpret_cast( &rhs ) ) { } ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV && setHandleType( ExternalMemoryHandleTypeFlagsNV handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV && setHandle( HANDLE handle_ ) && VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryWin32HandleInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryWin32HandleInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined(
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, handle ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMemoryWin32HandleInfoNV const & ) const = default; # else bool operator==( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ); # endif } bool operator!=( ImportMemoryWin32HandleInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; const void * pNext = {}; ExternalMemoryHandleTypeFlagsNV handleType = {}; HANDLE handle = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryWin32HandleInfoNV; }; # endif template <> struct CppType { using Type = ImportMemoryWin32HandleInfoNV; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImportMemoryZirconHandleInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMemoryZirconHandleInfoFUCHSIA.html struct ImportMemoryZirconHandleInfoFUCHSIA { using NativeType = VkImportMemoryZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMemoryZirconHandleInfoFUCHSIA( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } , handle{ handle_ } { } VULKAN_HPP_CONSTEXPR 
ImportMemoryZirconHandleInfoFUCHSIA( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMemoryZirconHandleInfoFUCHSIA( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMemoryZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } ImportMemoryZirconHandleInfoFUCHSIA & operator=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMemoryZirconHandleInfoFUCHSIA & operator=( VkImportMemoryZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA & setHandle( zx_handle_t handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMemoryZirconHandleInfoFUCHSIA && setHandle( zx_handle_t handle_ ) && VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMemoryZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMemoryZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkImportMemoryZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMemoryZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType, handle ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) return cmp; if ( auto cmp = memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ) && ( memcmp( &handle, &rhs.handle, sizeof( zx_handle_t ) ) == 0 ); } bool operator!=( ImportMemoryZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eImportMemoryZirconHandleInfoFUCHSIA; const void * pNext = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; zx_handle_t handle = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMemoryZirconHandleInfoFUCHSIA; }; # endif template <> struct CppType { using Type = ImportMemoryZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkImportMetalBufferInfoEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalBufferInfoEXT.html struct ImportMetalBufferInfoEXT { using NativeType = VkImportMetalBufferInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalBufferInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mtlBuffer{ mtlBuffer_ } { } VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMetalBufferInfoEXT( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMetalBufferInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMetalBufferInfoEXT & operator=( ImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMetalBufferInfoEXT & operator=( VkImportMetalBufferInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT & setMtlBuffer( MTLBuffer_id mtlBuffer_ ) & VULKAN_HPP_NOEXCEPT { mtlBuffer = mtlBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalBufferInfoEXT && setMtlBuffer( MTLBuffer_id mtlBuffer_ ) && VULKAN_HPP_NOEXCEPT { mtlBuffer = mtlBuffer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMetalBufferInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkImportMetalBufferInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalBufferInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMetalBufferInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mtlBuffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMetalBufferInfoEXT const & ) const = default; # else bool operator==( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlBuffer == rhs.mtlBuffer ); # endif } bool operator!=( ImportMetalBufferInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMetalBufferInfoEXT; const void * pNext = {}; MTLBuffer_id mtlBuffer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMetalBufferInfoEXT; }; # endif template <> struct CppType { using Type = ImportMetalBufferInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkImportMetalIOSurfaceInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalIOSurfaceInfoEXT.html struct ImportMetalIOSurfaceInfoEXT { using NativeType = VkImportMetalIOSurfaceInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalIoSurfaceInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , 
ioSurface{ ioSurface_ } { } VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMetalIOSurfaceInfoEXT( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMetalIOSurfaceInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMetalIOSurfaceInfoEXT & operator=( ImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMetalIOSurfaceInfoEXT & operator=( VkImportMetalIOSurfaceInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT & setIoSurface( IOSurfaceRef ioSurface_ ) & VULKAN_HPP_NOEXCEPT { ioSurface = ioSurface_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalIOSurfaceInfoEXT && setIoSurface( IOSurfaceRef ioSurface_ ) && VULKAN_HPP_NOEXCEPT { ioSurface = ioSurface_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMetalIOSurfaceInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalIOSurfaceInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalIOSurfaceInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMetalIOSurfaceInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, ioSurface ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( ImportMetalIOSurfaceInfoEXT const & ) const = default; # else bool operator==( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( ioSurface == rhs.ioSurface ); # endif } bool operator!=( ImportMetalIOSurfaceInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMetalIoSurfaceInfoEXT; const void * pNext = {}; IOSurfaceRef ioSurface = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMetalIOSurfaceInfoEXT; }; # endif template <> struct CppType { using Type = ImportMetalIOSurfaceInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkImportMetalSharedEventInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalSharedEventInfoEXT.html struct ImportMetalSharedEventInfoEXT { using NativeType = VkImportMetalSharedEventInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalSharedEventInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mtlSharedEvent{ mtlSharedEvent_ } { } VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMetalSharedEventInfoEXT( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMetalSharedEventInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMetalSharedEventInfoEXT & operator=( ImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMetalSharedEventInfoEXT & operator=( VkImportMetalSharedEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT & setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) & VULKAN_HPP_NOEXCEPT { mtlSharedEvent = mtlSharedEvent_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalSharedEventInfoEXT && setMtlSharedEvent( MTLSharedEvent_id mtlSharedEvent_ ) && VULKAN_HPP_NOEXCEPT { mtlSharedEvent = mtlSharedEvent_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMetalSharedEventInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalSharedEventInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalSharedEventInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMetalSharedEventInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mtlSharedEvent ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMetalSharedEventInfoEXT const & ) const = default; # else bool operator==( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mtlSharedEvent == rhs.mtlSharedEvent ); # endif } 
bool operator!=( ImportMetalSharedEventInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMetalSharedEventInfoEXT; const void * pNext = {}; MTLSharedEvent_id mtlSharedEvent = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMetalSharedEventInfoEXT; }; # endif template <> struct CppType { using Type = ImportMetalSharedEventInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkImportMetalTextureInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportMetalTextureInfoEXT.html struct ImportMetalTextureInfoEXT { using NativeType = VkImportMetalTextureInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMetalTextureInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImageAspectFlagBits plane_ = ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , plane{ plane_ } , mtlTexture{ mtlTexture_ } { } VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportMetalTextureInfoEXT( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : ImportMetalTextureInfoEXT( *reinterpret_cast( &rhs ) ) { } ImportMetalTextureInfoEXT & operator=( ImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportMetalTextureInfoEXT & operator=( VkImportMetalTextureInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPNext( 
const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setPlane( ImageAspectFlagBits plane_ ) & VULKAN_HPP_NOEXCEPT { plane = plane_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT && setPlane( ImageAspectFlagBits plane_ ) && VULKAN_HPP_NOEXCEPT { plane = plane_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT & setMtlTexture( MTLTexture_id mtlTexture_ ) & VULKAN_HPP_NOEXCEPT { mtlTexture = mtlTexture_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportMetalTextureInfoEXT && setMtlTexture( MTLTexture_id mtlTexture_ ) && VULKAN_HPP_NOEXCEPT { mtlTexture = mtlTexture_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportMetalTextureInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalTextureInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportMetalTextureInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportMetalTextureInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, plane, mtlTexture ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportMetalTextureInfoEXT const & ) const = default; # else bool operator==( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( plane == rhs.plane ) && ( mtlTexture == rhs.mtlTexture ); # endif } bool operator!=( ImportMetalTextureInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { 
return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportMetalTextureInfoEXT; const void * pNext = {}; ImageAspectFlagBits plane = ImageAspectFlagBits::eColor; MTLTexture_id mtlTexture = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportMetalTextureInfoEXT; }; # endif template <> struct CppType { using Type = ImportMetalTextureInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkImportNativeBufferInfoOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportNativeBufferInfoOHOS.html struct ImportNativeBufferInfoOHOS { using NativeType = VkImportNativeBufferInfoOHOS; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportNativeBufferInfoOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportNativeBufferInfoOHOS( struct OH_NativeBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR ImportNativeBufferInfoOHOS( ImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportNativeBufferInfoOHOS( VkImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : ImportNativeBufferInfoOHOS( *reinterpret_cast( &rhs ) ) { } ImportNativeBufferInfoOHOS & operator=( ImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportNativeBufferInfoOHOS & operator=( VkImportNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportNativeBufferInfoOHOS & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
ImportNativeBufferInfoOHOS && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportNativeBufferInfoOHOS & setBuffer( struct OH_NativeBuffer * buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportNativeBufferInfoOHOS && setBuffer( struct OH_NativeBuffer * buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportNativeBufferInfoOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportNativeBufferInfoOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportNativeBufferInfoOHOS const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportNativeBufferInfoOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportNativeBufferInfoOHOS const & ) const = default; # else bool operator==( ImportNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( ImportNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportNativeBufferInfoOHOS; const void * pNext = {}; struct OH_NativeBuffer * buffer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportNativeBufferInfoOHOS; }; # endif template <> struct CppType { using Type = ImportNativeBufferInfoOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ #if defined( VK_USE_PLATFORM_SCREEN_QNX ) // wrapper struct for 
struct VkImportScreenBufferInfoQNX, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportScreenBufferInfoQNX.html struct ImportScreenBufferInfoQNX { using NativeType = VkImportScreenBufferInfoQNX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportScreenBufferInfoQNX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportScreenBufferInfoQNX( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT : ImportScreenBufferInfoQNX( *reinterpret_cast( &rhs ) ) { } ImportScreenBufferInfoQNX & operator=( ImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportScreenBufferInfoQNX & operator=( VkImportScreenBufferInfoQNX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX & setBuffer( struct _screen_buffer * buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportScreenBufferInfoQNX && setBuffer( struct _screen_buffer * buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportScreenBufferInfoQNX const &() 
const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportScreenBufferInfoQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportScreenBufferInfoQNX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportScreenBufferInfoQNX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, buffer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportScreenBufferInfoQNX const & ) const = default; # else bool operator==( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( buffer == rhs.buffer ); # endif } bool operator!=( ImportScreenBufferInfoQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportScreenBufferInfoQNX; const void * pNext = {}; struct _screen_buffer * buffer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportScreenBufferInfoQNX; }; # endif template <> struct CppType { using Type = ImportScreenBufferInfoQNX; }; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ // wrapper struct for struct VkImportSemaphoreFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreFdInfoKHR.html struct ImportSemaphoreFdInfoKHR { using NativeType = VkImportSemaphoreFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( Semaphore semaphore_ = {}, SemaphoreImportFlags flags_ = {}, 
ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , semaphore{ semaphore_ } , flags{ flags_ } , handleType{ handleType_ } , fd{ fd_ } { } VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImportSemaphoreFdInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setSemaphore( Semaphore semaphore_ ) & VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR && setSemaphore( Semaphore semaphore_ ) && VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFlags( SemaphoreImportFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR && setFlags( SemaphoreImportFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = 
handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR && setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & setFd( int fd_ ) & VULKAN_HPP_NOEXCEPT { fd = fd_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR && setFd( int fd_ ) && VULKAN_HPP_NOEXCEPT { fd = fd_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportSemaphoreFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportSemaphoreFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportSemaphoreFdInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, semaphore, flags, handleType, fd ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportSemaphoreFdInfoKHR const & ) const = default; #else bool operator==( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( fd == rhs.fd ); # endif } bool operator!=( ImportSemaphoreFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; const void * pNext = {}; Semaphore semaphore = {}; SemaphoreImportFlags flags = {}; ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; int fd = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> 
struct CppType { using Type = ImportSemaphoreFdInfoKHR; }; #endif template <> struct CppType { using Type = ImportSemaphoreFdInfoKHR; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkImportSemaphoreWin32HandleInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreWin32HandleInfoKHR.html struct ImportSemaphoreWin32HandleInfoKHR { using NativeType = VkImportSemaphoreWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( Semaphore semaphore_ = {}, SemaphoreImportFlags flags_ = {}, ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , semaphore{ semaphore_ } , flags{ flags_ } , handleType{ handleType_ } , handle{ handle_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setSemaphore( Semaphore semaphore_ ) & VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setSemaphore( Semaphore semaphore_ ) && VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setFlags( SemaphoreImportFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setFlags( SemaphoreImportFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) & VULKAN_HPP_NOEXCEPT { handle = handle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setHandle( HANDLE handle_ ) && VULKAN_HPP_NOEXCEPT { handle = handle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) & VULKAN_HPP_NOEXCEPT { name = name_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR && setName( LPCWSTR name_ ) && VULKAN_HPP_NOEXCEPT { name = name_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportSemaphoreWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportSemaphoreWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportSemaphoreWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, semaphore, flags, handleType, handle, name ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const & ) const = default; # else bool operator==( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( handle == rhs.handle ) && ( name == rhs.name ); # endif } bool operator!=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; const void * pNext = {}; Semaphore semaphore = {}; SemaphoreImportFlags flags = {}; ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; HANDLE handle = {}; LPCWSTR name = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportSemaphoreWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = ImportSemaphoreWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkImportSemaphoreZirconHandleInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkImportSemaphoreZirconHandleInfoFUCHSIA.html struct ImportSemaphoreZirconHandleInfoFUCHSIA { using NativeType = 
VkImportSemaphoreZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( Semaphore semaphore_ = {}, SemaphoreImportFlags flags_ = {}, ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , semaphore{ semaphore_ } , flags{ flags_ } , handleType{ handleType_ } , zirconHandle{ zirconHandle_ } { } VULKAN_HPP_CONSTEXPR ImportSemaphoreZirconHandleInfoFUCHSIA( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; ImportSemaphoreZirconHandleInfoFUCHSIA( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : ImportSemaphoreZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ ImportSemaphoreZirconHandleInfoFUCHSIA & operator=( VkImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setSemaphore( Semaphore semaphore_ ) & VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA && setSemaphore( Semaphore semaphore_ ) && VULKAN_HPP_NOEXCEPT { semaphore = semaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setFlags( SemaphoreImportFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA && setFlags( SemaphoreImportFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA && setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA & setZirconHandle( zx_handle_t zirconHandle_ ) & VULKAN_HPP_NOEXCEPT { zirconHandle = zirconHandle_; return *this; } VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreZirconHandleInfoFUCHSIA && setZirconHandle( zx_handle_t zirconHandle_ ) && VULKAN_HPP_NOEXCEPT { zirconHandle = zirconHandle_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkImportSemaphoreZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportSemaphoreZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkImportSemaphoreZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkImportSemaphoreZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, semaphore, flags, handleType, zirconHandle ); } # endif # if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = semaphore <=> rhs.semaphore; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = handleType <=> rhs.handleType; cmp != 0 ) return cmp; if ( auto cmp = memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } # endif bool operator==( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( semaphore == rhs.semaphore ) && ( flags == rhs.flags ) && ( handleType == rhs.handleType ) && ( memcmp( &zirconHandle, &rhs.zirconHandle, sizeof( zx_handle_t ) ) == 0 ); } bool operator!=( ImportSemaphoreZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eImportSemaphoreZirconHandleInfoFUCHSIA; const void * pNext = {}; Semaphore semaphore = {}; SemaphoreImportFlags flags = {}; ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; zx_handle_t zirconHandle = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; }; # endif template <> struct CppType { using Type = ImportSemaphoreZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkIndirectCommandsExecutionSetTokenEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsExecutionSetTokenEXT.html struct IndirectCommandsExecutionSetTokenEXT { using NativeType = VkIndirectCommandsExecutionSetTokenEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectExecutionSetInfoTypeEXT type_ = IndirectExecutionSetInfoTypeEXT::ePipelines, ShaderStageFlags shaderStages_ = {} ) VULKAN_HPP_NOEXCEPT : type{ type_ } , shaderStages{ shaderStages_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast( &rhs ) ) { } IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( IndirectExecutionSetInfoTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT && setType( IndirectExecutionSetInfoTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( ShaderStageFlags shaderStages_ ) & VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT && setShaderStages( ShaderStageFlags shaderStages_ ) && VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsExecutionSetTokenEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsExecutionSetTokenEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( type, shaderStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default; #else bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( type == rhs.type ) && ( shaderStages == rhs.shaderStages ); # endif } bool operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: IndirectExecutionSetInfoTypeEXT type = IndirectExecutionSetInfoTypeEXT::ePipelines; ShaderStageFlags shaderStages = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsExecutionSetTokenEXT; }; #endif // wrapper struct for struct VkIndirectCommandsIndexBufferTokenEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsIndexBufferTokenEXT.html struct IndirectCommandsIndexBufferTokenEXT { using NativeType = VkIndirectCommandsIndexBufferTokenEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsInputModeFlagBitsEXT mode_ = IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) VULKAN_HPP_NOEXCEPT : mode{ mode_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsIndexBufferTokenEXT( 
VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast( &rhs ) ) { } IndirectCommandsIndexBufferTokenEXT & operator=( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & setMode( IndirectCommandsInputModeFlagBitsEXT mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT && setMode( IndirectCommandsInputModeFlagBitsEXT mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsIndexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsIndexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( mode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const = default; #else bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( mode == rhs.mode ); # endif } bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: IndirectCommandsInputModeFlagBitsEXT mode = IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsIndexBufferTokenEXT; }; #endif // wrapper struct for struct VkPushConstantRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPushConstantRange.html struct PushConstantRange { using NativeType = VkPushConstantRange; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PushConstantRange( ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT : stageFlags{ stageFlags_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( ShaderStageFlags stageFlags_ ) & VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PushConstantRange && setStageFlags( ShaderStageFlags stageFlags_ ) && VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 PushConstantRange && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PushConstantRange & 
setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 PushConstantRange && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPushConstantRange const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPushConstantRange *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( stageFlags, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PushConstantRange const & ) const = default; #else bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: ShaderStageFlags stageFlags = {}; uint32_t offset = {}; uint32_t size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PushConstantRange; }; #endif // wrapper struct for struct VkIndirectCommandsPushConstantTokenEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsPushConstantTokenEXT.html struct IndirectCommandsPushConstantTokenEXT { using NativeType = VkIndirectCommandsPushConstantTokenEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( PushConstantRange updateRange_ = {} ) VULKAN_HPP_NOEXCEPT : updateRange{ updateRange_ } {} 
VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsPushConstantTokenEXT( *reinterpret_cast( &rhs ) ) { } IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & setUpdateRange( PushConstantRange const & updateRange_ ) & VULKAN_HPP_NOEXCEPT { updateRange = updateRange_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT && setUpdateRange( PushConstantRange const & updateRange_ ) && VULKAN_HPP_NOEXCEPT { updateRange = updateRange_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsPushConstantTokenEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsPushConstantTokenEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( updateRange ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default; #else bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { 
# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( updateRange == rhs.updateRange ); # endif } bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: PushConstantRange updateRange = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsPushConstantTokenEXT; }; #endif // wrapper struct for struct VkIndirectCommandsVertexBufferTokenEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsVertexBufferTokenEXT.html struct IndirectCommandsVertexBufferTokenEXT { using NativeType = VkIndirectCommandsVertexBufferTokenEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( uint32_t vertexBindingUnit_ = {} ) VULKAN_HPP_NOEXCEPT : vertexBindingUnit{ vertexBindingUnit_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast( &rhs ) ) { } IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) & VULKAN_HPP_NOEXCEPT { vertexBindingUnit = vertexBindingUnit_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT && 
setVertexBindingUnit( uint32_t vertexBindingUnit_ ) && VULKAN_HPP_NOEXCEPT { vertexBindingUnit = vertexBindingUnit_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsVertexBufferTokenEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsVertexBufferTokenEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( vertexBindingUnit ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default; #else bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( vertexBindingUnit == rhs.vertexBindingUnit ); # endif } bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t vertexBindingUnit = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsVertexBufferTokenEXT; }; #endif union IndirectCommandsTokenDataEXT { using NativeType = VkIndirectCommandsTokenDataEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} ) : pPushConstant( pPushConstant_ ) { } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) : pVertexBuffer( pVertexBuffer_ ) {} VULKAN_HPP_CONSTEXPR_14 
IndirectCommandsTokenDataEXT( const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) : pIndexBuffer( pIndexBuffer_ ) {} VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) : pExecutionSet( pExecutionSet_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPPushConstant( const IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) & VULKAN_HPP_NOEXCEPT { pPushConstant = pPushConstant_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT && setPPushConstant( const IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) && VULKAN_HPP_NOEXCEPT { pPushConstant = pPushConstant_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPVertexBuffer( const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) & VULKAN_HPP_NOEXCEPT { pVertexBuffer = pVertexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT && setPVertexBuffer( const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) && VULKAN_HPP_NOEXCEPT { pVertexBuffer = pVertexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPIndexBuffer( const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) & VULKAN_HPP_NOEXCEPT { pIndexBuffer = pIndexBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT && setPIndexBuffer( const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) && VULKAN_HPP_NOEXCEPT { pIndexBuffer = pIndexBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & setPExecutionSet( const IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) & VULKAN_HPP_NOEXCEPT { pExecutionSet = pExecutionSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT && setPExecutionSet( const IndirectCommandsExecutionSetTokenEXT * 
pExecutionSet_ ) && VULKAN_HPP_NOEXCEPT { pExecutionSet = pExecutionSet_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsTokenDataEXT const &() const { return *reinterpret_cast( this ); } operator VkIndirectCommandsTokenDataEXT &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS const IndirectCommandsPushConstantTokenEXT * pPushConstant; const IndirectCommandsVertexBufferTokenEXT * pVertexBuffer; const IndirectCommandsIndexBufferTokenEXT * pIndexBuffer; const IndirectCommandsExecutionSetTokenEXT * pExecutionSet; #else const VkIndirectCommandsPushConstantTokenEXT * pPushConstant; const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer; const VkIndirectCommandsIndexBufferTokenEXT * pIndexBuffer; const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsTokenDataEXT; }; #endif // wrapper struct for struct VkIndirectCommandsLayoutTokenEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenEXT.html struct IndirectCommandsLayoutTokenEXT { using NativeType = VkIndirectCommandsLayoutTokenEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsTokenTypeEXT type_ = IndirectCommandsTokenTypeEXT::eExecutionSet, IndirectCommandsTokenDataEXT data_ = {}, uint32_t offset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , data{ data_ } , offset{ offset_ } { } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
// --- IndirectCommandsLayoutTokenEXT (continued): Vk-native copy constructor/assignment,
// fluent setters (lvalue & rvalue overloads), native-handle conversions and reflection.
// NOTE(review): template argument lists appear stripped in this copy of the generated
// header (e.g. `reinterpret_cast( &rhs )` has no target type) -- regenerate from the
// Vulkan XML registry instead of repairing by hand; code below is preserved verbatim.
IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutTokenEXT( *reinterpret_cast( &rhs ) ) { } IndirectCommandsLayoutTokenEXT & operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( IndirectCommandsTokenTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT && setType( IndirectCommandsTokenTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( IndirectCommandsTokenDataEXT const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT && setData( IndirectCommandsTokenDataEXT const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); 
// Members of IndirectCommandsLayoutTokenEXT end here; CppType maps both the
// StructureType enumerant and the Vk C type back to this wrapper. The
// IndirectCommandsLayoutCreateInfoEXT wrapper (token array + layout usage) begins
// on this same physical line.
} operator VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, data, offset ); } #endif public: StructureType sType = StructureType::eIndirectCommandsLayoutTokenEXT; const void * pNext = {}; IndirectCommandsTokenTypeEXT type = IndirectCommandsTokenTypeEXT::eExecutionSet; IndirectCommandsTokenDataEXT data = {}; uint32_t offset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsLayoutTokenEXT; }; #endif template <> struct CppType { using Type = IndirectCommandsLayoutTokenEXT; }; // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoEXT.html struct IndirectCommandsLayoutCreateInfoEXT { using NativeType = VkIndirectCommandsLayoutCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutUsageFlagsEXT flags_ = {}, ShaderStageFlags shaderStages_ = {}, uint32_t indirectStride_ = {}, PipelineLayout pipelineLayout_ = {}, uint32_t tokenCount_ = {}, const IndirectCommandsLayoutTokenEXT * pTokens_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , shaderStages{ shaderStages_ } , indirectStride{ indirectStride_ } , pipelineLayout{ pipelineLayout_ } , tokenCount{ tokenCount_ } , pTokens{ pTokens_ } { } 
// IndirectCommandsLayoutCreateInfoEXT (continued). The enhanced-mode constructor
// and setTokens() derive tokenCount/pTokens together from an ArrayProxy, keeping
// the count and the pointer consistent; the raw setTokenCount()/setPTokens()
// setters leave that invariant to the caller. All setters come in lvalue-ref
// (returns *this) and rvalue-ref (returns std::move(*this)) flavors for chaining.
// NOTE(review): template argument lists look stripped here too
// (`static_cast( tokens_.size() )`, `ArrayProxyNoTemporaries const &`);
// preserved verbatim -- regenerate the header rather than hand-patching.
VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutUsageFlagsEXT flags_, ShaderStageFlags shaderStages_, uint32_t indirectStride_, PipelineLayout pipelineLayout_, ArrayProxyNoTemporaries const & tokens_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , shaderStages( shaderStages_ ) , indirectStride( indirectStride_ ) , pipelineLayout( pipelineLayout_ ) , tokenCount( static_cast( tokens_.size() ) ) , pTokens( tokens_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setFlags( IndirectCommandsLayoutUsageFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setFlags( IndirectCommandsLayoutUsageFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
IndirectCommandsLayoutCreateInfoEXT & setShaderStages( ShaderStageFlags shaderStages_ ) & VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setShaderStages( ShaderStageFlags shaderStages_ ) && VULKAN_HPP_NOEXCEPT { shaderStages = shaderStages_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) & VULKAN_HPP_NOEXCEPT { indirectStride = indirectStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setIndirectStride( uint32_t indirectStride_ ) && VULKAN_HPP_NOEXCEPT { indirectStride = indirectStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( PipelineLayout pipelineLayout_ ) & VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setPipelineLayout( PipelineLayout pipelineLayout_ ) && VULKAN_HPP_NOEXCEPT { pipelineLayout = pipelineLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) & VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setTokenCount( uint32_t tokenCount_ ) && VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPTokens( const IndirectCommandsLayoutTokenEXT * pTokens_ ) & VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT && setPTokens( const IndirectCommandsLayoutTokenEXT * pTokens_ ) && VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutCreateInfoEXT & setTokens( ArrayProxyNoTemporaries const & tokens_ ) 
VULKAN_HPP_NOEXCEPT { tokenCount = static_cast( tokens_.size() ); pTokens = tokens_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default; #else bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( shaderStages == rhs.shaderStages ) && ( indirectStride == rhs.indirectStride ) && ( pipelineLayout == rhs.pipelineLayout ) && ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ); # endif } bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; const void * pNext = {}; IndirectCommandsLayoutUsageFlagsEXT flags = {}; ShaderStageFlags shaderStages = {}; uint32_t indirectStride = {}; PipelineLayout pipelineLayout = {}; uint32_t tokenCount = {}; const IndirectCommandsLayoutTokenEXT * pTokens = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
// CppType specializations for IndirectCommandsLayoutCreateInfoEXT, then the
// IndirectCommandsLayoutTokenNV wrapper (VK_NV_device_generated_commands): one
// token of an NV indirect commands layout, including push-constant ranges and
// the paired pIndexTypes / pIndexTypeValues arrays (both sized by indexTypeCount).
// The enhanced-mode constructor enforces indexTypes_.size() == indexTypeValues_.size()
// via assert (VULKAN_HPP_NO_EXCEPTIONS) or by throwing LogicError.
// NOTE(review): stripped template arguments preserved verbatim -- regenerate the header.
IndirectCommandsLayoutCreateInfoEXT; }; #endif template <> struct CppType { using Type = IndirectCommandsLayoutCreateInfoEXT; }; // wrapper struct for struct VkIndirectCommandsLayoutTokenNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutTokenNV.html struct IndirectCommandsLayoutTokenNV { using NativeType = VkIndirectCommandsLayoutTokenNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsTokenTypeNV tokenType_ = IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, Bool32 vertexDynamicStride_ = {}, PipelineLayout pushconstantPipelineLayout_ = {}, ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const IndexType * pIndexTypes_ = {}, const uint32_t * pIndexTypeValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tokenType{ tokenType_ } , stream{ stream_ } , offset{ offset_ } , vertexBindingUnit{ vertexBindingUnit_ } , vertexDynamicStride{ vertexDynamicStride_ } , pushconstantPipelineLayout{ pushconstantPipelineLayout_ } , pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ } , pushconstantOffset{ pushconstantOffset_ } , pushconstantSize{ pushconstantSize_ } , indirectStateFlags{ indirectStateFlags_ } , indexTypeCount{ indexTypeCount_ } , pIndexTypes{ pIndexTypes_ } , pIndexTypeValues{ pIndexTypeValues_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) 
VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutTokenNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutTokenNV( IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, Bool32 vertexDynamicStride_, PipelineLayout pushconstantPipelineLayout_, ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, IndirectStateFlagsNV indirectStateFlags_, ArrayProxyNoTemporaries const & indexTypes_, ArrayProxyNoTemporaries const & indexTypeValues_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , tokenType( tokenType_ ) , stream( stream_ ) , offset( offset_ ) , vertexBindingUnit( vertexBindingUnit_ ) , vertexDynamicStride( vertexDynamicStride_ ) , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) , pushconstantOffset( pushconstantOffset_ ) , pushconstantSize( pushconstantSize_ ) , indirectStateFlags( indirectStateFlags_ ) , indexTypeCount( static_cast( indexTypes_.size() ) ) , pIndexTypes( indexTypes_.data() ) , pIndexTypeValues( indexTypeValues_.data() ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() ); # else if ( indexTypes_.size() != indexTypeValues_.size() ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setTokenType( IndirectCommandsTokenTypeNV tokenType_ ) & VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setTokenType( IndirectCommandsTokenTypeNV tokenType_ ) && VULKAN_HPP_NOEXCEPT { tokenType = tokenType_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) & VULKAN_HPP_NOEXCEPT { stream = stream_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setStream( uint32_t stream_ ) && VULKAN_HPP_NOEXCEPT { stream = stream_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setOffset( uint32_t offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) & VULKAN_HPP_NOEXCEPT { vertexBindingUnit = vertexBindingUnit_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setVertexBindingUnit( uint32_t vertexBindingUnit_ ) && VULKAN_HPP_NOEXCEPT { vertexBindingUnit = vertexBindingUnit_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setVertexDynamicStride( Bool32 vertexDynamicStride_ ) & VULKAN_HPP_NOEXCEPT { vertexDynamicStride = vertexDynamicStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setVertexDynamicStride( Bool32 vertexDynamicStride_ ) && VULKAN_HPP_NOEXCEPT { 
vertexDynamicStride = vertexDynamicStride_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( PipelineLayout pushconstantPipelineLayout_ ) & VULKAN_HPP_NOEXCEPT { pushconstantPipelineLayout = pushconstantPipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPushconstantPipelineLayout( PipelineLayout pushconstantPipelineLayout_ ) && VULKAN_HPP_NOEXCEPT { pushconstantPipelineLayout = pushconstantPipelineLayout_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( ShaderStageFlags pushconstantShaderStageFlags_ ) & VULKAN_HPP_NOEXCEPT { pushconstantShaderStageFlags = pushconstantShaderStageFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPushconstantShaderStageFlags( ShaderStageFlags pushconstantShaderStageFlags_ ) && VULKAN_HPP_NOEXCEPT { pushconstantShaderStageFlags = pushconstantShaderStageFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) & VULKAN_HPP_NOEXCEPT { pushconstantOffset = pushconstantOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPushconstantOffset( uint32_t pushconstantOffset_ ) && VULKAN_HPP_NOEXCEPT { pushconstantOffset = pushconstantOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) & VULKAN_HPP_NOEXCEPT { pushconstantSize = pushconstantSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPushconstantSize( uint32_t pushconstantSize_ ) && VULKAN_HPP_NOEXCEPT { pushconstantSize = pushconstantSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndirectStateFlags( IndirectStateFlagsNV indirectStateFlags_ ) & VULKAN_HPP_NOEXCEPT { indirectStateFlags = 
indirectStateFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setIndirectStateFlags( IndirectStateFlagsNV indirectStateFlags_ ) && VULKAN_HPP_NOEXCEPT { indirectStateFlags = indirectStateFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) & VULKAN_HPP_NOEXCEPT { indexTypeCount = indexTypeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setIndexTypeCount( uint32_t indexTypeCount_ ) && VULKAN_HPP_NOEXCEPT { indexTypeCount = indexTypeCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypes( const IndexType * pIndexTypes_ ) & VULKAN_HPP_NOEXCEPT { pIndexTypes = pIndexTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPIndexTypes( const IndexType * pIndexTypes_ ) && VULKAN_HPP_NOEXCEPT { pIndexTypes = pIndexTypes_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutTokenNV & setIndexTypes( ArrayProxyNoTemporaries const & indexTypes_ ) VULKAN_HPP_NOEXCEPT { indexTypeCount = static_cast( indexTypes_.size() ); pIndexTypes = indexTypes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) & VULKAN_HPP_NOEXCEPT { pIndexTypeValues = pIndexTypeValues_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV && setPIndexTypeValues( const uint32_t * pIndexTypeValues_ ) && VULKAN_HPP_NOEXCEPT { pIndexTypeValues = pIndexTypeValues_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutTokenNV & setIndexTypeValues( ArrayProxyNoTemporaries const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT { indexTypeCount = static_cast( indexTypeValues_.size() ); pIndexTypeValues = indexTypeValues_.data(); return *this; } # endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsLayoutTokenNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsLayoutTokenNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tokenType, stream, offset, vertexBindingUnit, vertexDynamicStride, pushconstantPipelineLayout, pushconstantShaderStageFlags, pushconstantOffset, pushconstantSize, indirectStateFlags, indexTypeCount, pIndexTypes, pIndexTypeValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsLayoutTokenNV const & ) const = default; #else bool operator==( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tokenType == rhs.tokenType ) && ( stream == rhs.stream ) && ( offset == rhs.offset ) && ( vertexBindingUnit == rhs.vertexBindingUnit ) && ( vertexDynamicStride == rhs.vertexDynamicStride ) && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout ) && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags ) && ( pushconstantOffset == rhs.pushconstantOffset ) && ( pushconstantSize == rhs.pushconstantSize ) && ( indirectStateFlags == rhs.indirectStateFlags ) && ( indexTypeCount == rhs.indexTypeCount ) && ( pIndexTypes == rhs.pIndexTypes ) && ( pIndexTypeValues == rhs.pIndexTypeValues ); # endif } bool operator!=( IndirectCommandsLayoutTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType 
sType = StructureType::eIndirectCommandsLayoutTokenNV; const void * pNext = {}; IndirectCommandsTokenTypeNV tokenType = IndirectCommandsTokenTypeNV::eShaderGroup; uint32_t stream = {}; uint32_t offset = {}; uint32_t vertexBindingUnit = {}; Bool32 vertexDynamicStride = {}; PipelineLayout pushconstantPipelineLayout = {}; ShaderStageFlags pushconstantShaderStageFlags = {}; uint32_t pushconstantOffset = {}; uint32_t pushconstantSize = {}; IndirectStateFlagsNV indirectStateFlags = {}; uint32_t indexTypeCount = {}; const IndexType * pIndexTypes = {}; const uint32_t * pIndexTypeValues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsLayoutTokenNV; }; #endif template <> struct CppType { using Type = IndirectCommandsLayoutTokenNV; }; // wrapper struct for struct VkIndirectCommandsLayoutCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutCreateInfoNV.html struct IndirectCommandsLayoutCreateInfoNV { using NativeType = VkIndirectCommandsLayoutCreateInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutUsageFlagsNV flags_ = {}, PipelineBindPoint pipelineBindPoint_ = PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const IndirectCommandsLayoutTokenNV * pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t * pStreamStrides_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pipelineBindPoint{ pipelineBindPoint_ } , tokenCount{ tokenCount_ } , pTokens{ pTokens_ } , streamCount{ streamCount_ } , pStreamStrides{ pStreamStrides_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) 
// IndirectCommandsLayoutCreateInfoNV (continued): Vk-native conversions, the
// enhanced-mode constructor (derives tokenCount/pTokens and streamCount/pStreamStrides
// from ArrayProxy ranges), chaining setters, and the setTokens()/setStreamStrides()
// helpers that keep each count/pointer pair consistent.
VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutUsageFlagsNV flags_, PipelineBindPoint pipelineBindPoint_, ArrayProxyNoTemporaries const & tokens_, ArrayProxyNoTemporaries const & streamStrides_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , pipelineBindPoint( pipelineBindPoint_ ) , tokenCount( static_cast( tokens_.size() ) ) , pTokens( tokens_.data() ) , streamCount( static_cast( streamStrides_.size() ) ) , pStreamStrides( streamStrides_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setFlags( IndirectCommandsLayoutUsageFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setFlags( IndirectCommandsLayoutUsageFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( PipelineBindPoint 
pipelineBindPoint_ ) & VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setPipelineBindPoint( PipelineBindPoint pipelineBindPoint_ ) && VULKAN_HPP_NOEXCEPT { pipelineBindPoint = pipelineBindPoint_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) & VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setTokenCount( uint32_t tokenCount_ ) && VULKAN_HPP_NOEXCEPT { tokenCount = tokenCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPTokens( const IndirectCommandsLayoutTokenNV * pTokens_ ) & VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setPTokens( const IndirectCommandsLayoutTokenNV * pTokens_ ) && VULKAN_HPP_NOEXCEPT { pTokens = pTokens_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutCreateInfoNV & setTokens( ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT { tokenCount = static_cast( tokens_.size() ); pTokens = tokens_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) & VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setStreamCount( uint32_t streamCount_ ) && VULKAN_HPP_NOEXCEPT { streamCount = streamCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t * pStreamStrides_ ) & VULKAN_HPP_NOEXCEPT { pStreamStrides = pStreamStrides_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV && setPStreamStrides( const uint32_t * pStreamStrides_ ) && 
VULKAN_HPP_NOEXCEPT { pStreamStrides = pStreamStrides_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectCommandsLayoutCreateInfoNV & setStreamStrides( ArrayProxyNoTemporaries const & streamStrides_ ) VULKAN_HPP_NOEXCEPT { streamCount = static_cast( streamStrides_.size() ); pStreamStrides = streamStrides_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsLayoutCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsLayoutCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pipelineBindPoint, tokenCount, pTokens, streamCount, pStreamStrides ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsLayoutCreateInfoNV const & ) const = default; #else bool operator==( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pipelineBindPoint == rhs.pipelineBindPoint ) && ( tokenCount == rhs.tokenCount ) && ( pTokens == rhs.pTokens ) && ( streamCount == rhs.streamCount ) && ( pStreamStrides == rhs.pStreamStrides ); # endif } bool operator!=( IndirectCommandsLayoutCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV; const void * pNext = {}; IndirectCommandsLayoutUsageFlagsNV flags = {}; 
// Tail of IndirectCommandsLayoutCreateInfoNV members and its CppType
// specializations, then the IndirectCommandsLayoutPushDataTokenNV wrapper
// (a push-data byte range: offset + size) and the start of
// IndirectExecutionSetPipelineInfoEXT.
// NOTE(review): stripped template arguments preserved verbatim -- regenerate the header.
PipelineBindPoint pipelineBindPoint = PipelineBindPoint::eGraphics; uint32_t tokenCount = {}; const IndirectCommandsLayoutTokenNV * pTokens = {}; uint32_t streamCount = {}; const uint32_t * pStreamStrides = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsLayoutCreateInfoNV; }; #endif template <> struct CppType { using Type = IndirectCommandsLayoutCreateInfoNV; }; // wrapper struct for struct VkIndirectCommandsLayoutPushDataTokenNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectCommandsLayoutPushDataTokenNV.html struct IndirectCommandsLayoutPushDataTokenNV { using NativeType = VkIndirectCommandsLayoutPushDataTokenNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutPushDataTokenNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutPushDataTokenNV( uint32_t pushDataOffset_ = {}, uint32_t pushDataSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pushDataOffset{ pushDataOffset_ } , pushDataSize{ pushDataSize_ } { } VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutPushDataTokenNV( IndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectCommandsLayoutPushDataTokenNV( VkIndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectCommandsLayoutPushDataTokenNV( *reinterpret_cast( &rhs ) ) { } IndirectCommandsLayoutPushDataTokenNV & operator=( IndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectCommandsLayoutPushDataTokenNV & operator=( VkIndirectCommandsLayoutPushDataTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPushDataOffset( uint32_t pushDataOffset_ ) & VULKAN_HPP_NOEXCEPT { pushDataOffset = pushDataOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPushDataOffset( uint32_t pushDataOffset_ ) && VULKAN_HPP_NOEXCEPT { pushDataOffset = pushDataOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV & setPushDataSize( uint32_t pushDataSize_ ) & VULKAN_HPP_NOEXCEPT { pushDataSize = pushDataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutPushDataTokenNV && setPushDataSize( uint32_t pushDataSize_ ) && VULKAN_HPP_NOEXCEPT { pushDataSize = pushDataSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectCommandsLayoutPushDataTokenNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutPushDataTokenNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectCommandsLayoutPushDataTokenNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectCommandsLayoutPushDataTokenNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pushDataOffset, pushDataSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectCommandsLayoutPushDataTokenNV const & ) const = default; #else bool operator==( IndirectCommandsLayoutPushDataTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT 
) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pushDataOffset == rhs.pushDataOffset ) && ( pushDataSize == rhs.pushDataSize ); # endif } bool operator!=( IndirectCommandsLayoutPushDataTokenNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectCommandsLayoutPushDataTokenNV; const void * pNext = {}; uint32_t pushDataOffset = {}; uint32_t pushDataSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectCommandsLayoutPushDataTokenNV; }; #endif template <> struct CppType { using Type = IndirectCommandsLayoutPushDataTokenNV; }; // wrapper struct for struct VkIndirectExecutionSetPipelineInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetPipelineInfoEXT.html struct IndirectExecutionSetPipelineInfoEXT { using NativeType = VkIndirectExecutionSetPipelineInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( Pipeline initialPipeline_ = {}, uint32_t maxPipelineCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , initialPipeline{ initialPipeline_ } , maxPipelineCount{ maxPipelineCount_ } { } VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) { } IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( Pipeline initialPipeline_ ) & VULKAN_HPP_NOEXCEPT { initialPipeline = initialPipeline_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT && setInitialPipeline( Pipeline initialPipeline_ ) && VULKAN_HPP_NOEXCEPT { initialPipeline = initialPipeline_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) & VULKAN_HPP_NOEXCEPT { maxPipelineCount = maxPipelineCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT && setMaxPipelineCount( uint32_t maxPipelineCount_ ) && VULKAN_HPP_NOEXCEPT { maxPipelineCount = maxPipelineCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetPipelineInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectExecutionSetPipelineInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
initialPipeline, maxPipelineCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; #else bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); # endif } bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; const void * pNext = {}; Pipeline initialPipeline = {}; uint32_t maxPipelineCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectExecutionSetPipelineInfoEXT; }; #endif template <> struct CppType { using Type = IndirectExecutionSetPipelineInfoEXT; }; // wrapper struct for struct VkIndirectExecutionSetShaderLayoutInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderLayoutInfoEXT.html struct IndirectExecutionSetShaderLayoutInfoEXT { using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, const DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , setLayoutCount{ setLayoutCount_ } , pSetLayouts{ pSetLayouts_ } { } VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderLayoutInfoEXT( ArrayProxyNoTemporaries const & setLayouts_, const void * pNext_ = nullptr ) : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) & VULKAN_HPP_NOEXCEPT { setLayoutCount = setLayoutCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT && setSetLayoutCount( uint32_t setLayoutCount_ ) && VULKAN_HPP_NOEXCEPT { setLayoutCount = setLayoutCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPSetLayouts( const DescriptorSetLayout * pSetLayouts_ ) & VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT && setPSetLayouts( const DescriptorSetLayout * 
pSetLayouts_ ) && VULKAN_HPP_NOEXCEPT { pSetLayouts = pSetLayouts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderLayoutInfoEXT & setSetLayouts( ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT { setLayoutCount = static_cast( setLayouts_.size() ); pSetLayouts = setLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderLayoutInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderLayoutInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; #else bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); # endif } bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; const void * pNext = {}; uint32_t setLayoutCount = {}; const DescriptorSetLayout * pSetLayouts = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
IndirectExecutionSetShaderLayoutInfoEXT; }; #endif template <> struct CppType { using Type = IndirectExecutionSetShaderLayoutInfoEXT; }; // wrapper struct for struct VkIndirectExecutionSetShaderInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetShaderInfoEXT.html struct IndirectExecutionSetShaderInfoEXT { using NativeType = VkIndirectExecutionSetShaderInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, const ShaderEXT * pInitialShaders_ = {}, const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, uint32_t maxShaderCount_ = {}, uint32_t pushConstantRangeCount_ = {}, const PushConstantRange * pPushConstantRanges_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderCount{ shaderCount_ } , pInitialShaders{ pInitialShaders_ } , pSetLayoutInfos{ pSetLayoutInfos_ } , maxShaderCount{ maxShaderCount_ } , pushConstantRangeCount{ pushConstantRangeCount_ } , pPushConstantRanges{ pPushConstantRanges_ } { } VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderInfoEXT( ArrayProxyNoTemporaries const & initialShaders_, ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, uint32_t maxShaderCount_ = {}, ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , shaderCount( static_cast( initialShaders_.size() ) ) , 
pInitialShaders( initialShaders_.data() ) , pSetLayoutInfos( setLayoutInfos_.data() ) , maxShaderCount( maxShaderCount_ ) , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) , pPushConstantRanges( pushConstantRanges_.data() ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); # else if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) & VULKAN_HPP_NOEXCEPT { shaderCount = shaderCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setShaderCount( uint32_t shaderCount_ ) && VULKAN_HPP_NOEXCEPT { shaderCount = shaderCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPInitialShaders( const ShaderEXT * pInitialShaders_ ) & VULKAN_HPP_NOEXCEPT { 
pInitialShaders = pInitialShaders_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setPInitialShaders( const ShaderEXT * pInitialShaders_ ) && VULKAN_HPP_NOEXCEPT { pInitialShaders = pInitialShaders_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderInfoEXT & setInitialShaders( ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT { shaderCount = static_cast( initialShaders_.size() ); pInitialShaders = initialShaders_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPSetLayoutInfos( const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) & VULKAN_HPP_NOEXCEPT { pSetLayoutInfos = pSetLayoutInfos_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setPSetLayoutInfos( const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) && VULKAN_HPP_NOEXCEPT { pSetLayoutInfos = pSetLayoutInfos_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( ArrayProxyNoTemporaries const & setLayoutInfos_ ) VULKAN_HPP_NOEXCEPT { shaderCount = static_cast( setLayoutInfos_.size() ); pSetLayoutInfos = setLayoutInfos_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) & VULKAN_HPP_NOEXCEPT { maxShaderCount = maxShaderCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setMaxShaderCount( uint32_t maxShaderCount_ ) && VULKAN_HPP_NOEXCEPT { maxShaderCount = maxShaderCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) & VULKAN_HPP_NOEXCEPT { pushConstantRangeCount = pushConstantRangeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 
IndirectExecutionSetShaderInfoEXT && setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) && VULKAN_HPP_NOEXCEPT { pushConstantRangeCount = pushConstantRangeCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPPushConstantRanges( const PushConstantRange * pPushConstantRanges_ ) & VULKAN_HPP_NOEXCEPT { pPushConstantRanges = pPushConstantRanges_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT && setPPushConstantRanges( const PushConstantRange * pPushConstantRanges_ ) && VULKAN_HPP_NOEXCEPT { pPushConstantRanges = pPushConstantRanges_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT { pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); pPushConstantRanges = pushConstantRanges_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectExecutionSetShaderInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; #else bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT 
) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); # endif } bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; const void * pNext = {}; uint32_t shaderCount = {}; const ShaderEXT * pInitialShaders = {}; const IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; uint32_t maxShaderCount = {}; uint32_t pushConstantRangeCount = {}; const PushConstantRange * pPushConstantRanges = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectExecutionSetShaderInfoEXT; }; #endif template <> struct CppType { using Type = IndirectExecutionSetShaderInfoEXT; }; union IndirectExecutionSetInfoEXT { using NativeType = VkIndirectExecutionSetInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) : pPipelineInfo( pPipelineInfo_ ) {} VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) : pShaderInfo( pShaderInfo_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPPipelineInfo( const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) & VULKAN_HPP_NOEXCEPT { pPipelineInfo = pPipelineInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT && setPPipelineInfo( const 
IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) && VULKAN_HPP_NOEXCEPT { pPipelineInfo = pPipelineInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & setPShaderInfo( const IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) & VULKAN_HPP_NOEXCEPT { pShaderInfo = pShaderInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT && setPShaderInfo( const IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) && VULKAN_HPP_NOEXCEPT { pShaderInfo = pShaderInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectExecutionSetInfoEXT const &() const { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetInfoEXT &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS const IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; const IndirectExecutionSetShaderInfoEXT * pShaderInfo; #else const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectExecutionSetInfoEXT; }; #endif // wrapper struct for struct VkIndirectExecutionSetCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkIndirectExecutionSetCreateInfoEXT.html struct IndirectExecutionSetCreateInfoEXT { using NativeType = VkIndirectExecutionSetCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetInfoTypeEXT type_ = IndirectExecutionSetInfoTypeEXT::ePipelines, IndirectExecutionSetInfoEXT info_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , info{ 
info_ } { } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( IndirectExecutionSetInfoTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT && setType( IndirectExecutionSetInfoTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( IndirectExecutionSetInfoEXT const & info_ ) & VULKAN_HPP_NOEXCEPT { info = info_; return *this; } VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT && setInfo( IndirectExecutionSetInfoEXT const & info_ ) && VULKAN_HPP_NOEXCEPT { info = info_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkIndirectExecutionSetCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkIndirectExecutionSetCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, info ); } #endif public: StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; const void * pNext = {}; IndirectExecutionSetInfoTypeEXT type = IndirectExecutionSetInfoTypeEXT::ePipelines; IndirectExecutionSetInfoEXT info = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = IndirectExecutionSetCreateInfoEXT; }; #endif template <> struct CppType { using Type = IndirectExecutionSetCreateInfoEXT; }; // wrapper struct for struct VkInitializePerformanceApiInfoINTEL, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInitializePerformanceApiInfoINTEL.html struct InitializePerformanceApiInfoINTEL { using NativeType = VkInitializePerformanceApiInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pUserData{ pUserData_ } { } VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : InitializePerformanceApiInfoINTEL( *reinterpret_cast( &rhs ) ) { } InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & setPUserData( void * pUserData_ ) & VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return *this; } VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL && setPUserData( void * pUserData_ ) && VULKAN_HPP_NOEXCEPT { pUserData = pUserData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkInitializePerformanceApiInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInitializePerformanceApiInfoINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkInitializePerformanceApiInfoINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pUserData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; #else bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pUserData == rhs.pUserData ); # endif } bool operator!=( InitializePerformanceApiInfoINTEL const & rhs ) 
const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; const void * pNext = {}; void * pUserData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = InitializePerformanceApiInfoINTEL; }; #endif template <> struct CppType { using Type = InitializePerformanceApiInfoINTEL; }; // wrapper struct for struct VkInputAttachmentAspectReference, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkInputAttachmentAspectReference.html struct InputAttachmentAspectReference { using NativeType = VkInputAttachmentAspectReference; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT : subpass{ subpass_ } , inputAttachmentIndex{ inputAttachmentIndex_ } , aspectMask{ aspectMask_ } { } VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT : InputAttachmentAspectReference( *reinterpret_cast( &rhs ) ) { } InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) & VULKAN_HPP_NOEXCEPT { subpass = subpass_; return *this; } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference && setSubpass( uint32_t subpass_ ) && VULKAN_HPP_NOEXCEPT { subpass = 
subpass_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) & VULKAN_HPP_NOEXCEPT { inputAttachmentIndex = inputAttachmentIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference && setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) && VULKAN_HPP_NOEXCEPT { inputAttachmentIndex = inputAttachmentIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & setAspectMask( ImageAspectFlags aspectMask_ ) & VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference && setAspectMask( ImageAspectFlags aspectMask_ ) && VULKAN_HPP_NOEXCEPT { aspectMask = aspectMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkInputAttachmentAspectReference const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInputAttachmentAspectReference const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkInputAttachmentAspectReference *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( subpass, inputAttachmentIndex, aspectMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( InputAttachmentAspectReference const & ) const = default; #else bool operator==( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( subpass == rhs.subpass ) && ( inputAttachmentIndex == rhs.inputAttachmentIndex ) && ( aspectMask == rhs.aspectMask ); # endif } bool operator!=( InputAttachmentAspectReference const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); 
} #endif public: uint32_t subpass = {}; uint32_t inputAttachmentIndex = {}; ImageAspectFlags aspectMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = InputAttachmentAspectReference; }; #endif using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; // wrapper struct for struct VkInstanceCreateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkInstanceCreateInfo.html struct InstanceCreateInfo { using NativeType = VkInstanceCreateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateFlags flags_ = {}, const ApplicationInfo * pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char * const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pApplicationInfo{ pApplicationInfo_ } , enabledLayerCount{ enabledLayerCount_ } , ppEnabledLayerNames{ ppEnabledLayerNames_ } , enabledExtensionCount{ enabledExtensionCount_ } , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } { } VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : InstanceCreateInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) InstanceCreateInfo( InstanceCreateFlags flags_, const ApplicationInfo * pApplicationInfo_, ArrayProxyNoTemporaries const & pEnabledLayerNames_, ArrayProxyNoTemporaries const & pEnabledExtensionNames_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , pApplicationInfo( pApplicationInfo_ ) , enabledLayerCount( static_cast( 
// NOTE(review): machine-generated Vulkan-Hpp wrapper for VkInstanceCreateInfo (ctor tail, operator=,
// chainable setters, Vk conversions, reflect(), operator<=>). The text is corrupted by extraction:
// angle-bracket template arguments were stripped (e.g. `static_cast( ... )`, `reinterpret_cast( &rhs )`
// are missing their `<T>`), and `#if`/`#endif` directives sit mid-line. Do not hand-edit —
// regenerate this header from the Khronos Vulkan XML registry.
pEnabledLayerNames_.size() ) ) , ppEnabledLayerNames( pEnabledLayerNames_.data() ) , enabledExtensionCount( static_cast( pEnabledExtensionNames_.size() ) ) , ppEnabledExtensionNames( pEnabledExtensionNames_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setFlags( InstanceCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setFlags( InstanceCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPApplicationInfo( const ApplicationInfo * pApplicationInfo_ ) & VULKAN_HPP_NOEXCEPT { pApplicationInfo = pApplicationInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setPApplicationInfo( const ApplicationInfo * pApplicationInfo_ ) && VULKAN_HPP_NOEXCEPT { pApplicationInfo = pApplicationInfo_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) & VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setEnabledLayerCount( uint32_t enabledLayerCount_ ) && VULKAN_HPP_NOEXCEPT { enabledLayerCount = enabledLayerCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo 
& setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) & VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setPpEnabledLayerNames( const char * const * ppEnabledLayerNames_ ) && VULKAN_HPP_NOEXCEPT { ppEnabledLayerNames = ppEnabledLayerNames_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) InstanceCreateInfo & setPEnabledLayerNames( ArrayProxyNoTemporaries const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT { enabledLayerCount = static_cast( pEnabledLayerNames_.size() ); ppEnabledLayerNames = pEnabledLayerNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) & VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) && VULKAN_HPP_NOEXCEPT { enabledExtensionCount = enabledExtensionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) & VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return *this; } VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo && setPpEnabledExtensionNames( const char * const * ppEnabledExtensionNames_ ) && VULKAN_HPP_NOEXCEPT { ppEnabledExtensionNames = ppEnabledExtensionNames_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) InstanceCreateInfo & setPEnabledExtensionNames( ArrayProxyNoTemporaries const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT { enabledExtensionCount = static_cast( pEnabledExtensionNames_.size() ); ppEnabledExtensionNames = pEnabledExtensionNames_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkInstanceCreateInfo const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkInstanceCreateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkInstanceCreateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pApplicationInfo, enabledLayerCount, ppEnabledLayerNames, enabledExtensionCount, ppEnabledExtensionNames ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = pApplicationInfo <=> rhs.pApplicationInfo; cmp != 0 ) return cmp; if ( auto cmp = enabledLayerCount <=> rhs.enabledLayerCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < enabledLayerCount; ++i ) { if ( ppEnabledLayerNames[i] != rhs.ppEnabledLayerNames[i] ) if ( auto cmp = strcmp( ppEnabledLayerNames[i], rhs.ppEnabledLayerNames[i] ); cmp != 0 ) return cmp < 0 ? std::strong_ordering::less : std::strong_ordering::greater; } if ( auto cmp = enabledExtensionCount <=> rhs.enabledExtensionCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < enabledExtensionCount; ++i ) { if ( ppEnabledExtensionNames[i] != rhs.ppEnabledExtensionNames[i] ) if ( auto cmp = strcmp( ppEnabledExtensionNames[i], rhs.ppEnabledExtensionNames[i] ); cmp != 0 ) return cmp < 0 ? 
// NOTE(review): generated code — tail of InstanceCreateInfo (deep operator== comparing the layer and
// extension name arrays element-wise via strcmp, member defaults, CppType specializations), then the
// start of the VkLatencySleepInfoNV wrapper. Template arguments inside angle brackets were stripped
// during extraction (e.g. the empty `CppType` and `reinterpret_cast( &rhs )` forms below) — regenerate
// from the Vulkan XML registry rather than hand-editing.
std::strong_ordering::less : std::strong_ordering::greater; } return std::strong_ordering::equivalent; } #endif bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pApplicationInfo == rhs.pApplicationInfo ) && ( enabledLayerCount == rhs.enabledLayerCount ) && std::equal( ppEnabledLayerNames, ppEnabledLayerNames + enabledLayerCount, rhs.ppEnabledLayerNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ) && ( enabledExtensionCount == rhs.enabledExtensionCount ) && std::equal( ppEnabledExtensionNames, ppEnabledExtensionNames + enabledExtensionCount, rhs.ppEnabledExtensionNames, []( char const * left, char const * right ) { return ( left == right ) || ( strcmp( left, right ) == 0 ); } ); } bool operator!=( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::eInstanceCreateInfo; const void * pNext = {}; InstanceCreateFlags flags = {}; const ApplicationInfo * pApplicationInfo = {}; uint32_t enabledLayerCount = {}; const char * const * ppEnabledLayerNames = {}; uint32_t enabledExtensionCount = {}; const char * const * ppEnabledExtensionNames = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = InstanceCreateInfo; }; #endif template <> struct CppType { using Type = InstanceCreateInfo; }; // wrapper struct for struct VkLatencySleepInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepInfoNV.html struct LatencySleepInfoNV { using NativeType = VkLatencySleepInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( Semaphore 
signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , signalSemaphore{ signalSemaphore_ } , value{ value_ } { } VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; LatencySleepInfoNV( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepInfoNV( *reinterpret_cast( &rhs ) ) {} LatencySleepInfoNV & operator=( LatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LatencySleepInfoNV & operator=( VkLatencySleepInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setSignalSemaphore( Semaphore signalSemaphore_ ) & VULKAN_HPP_NOEXCEPT { signalSemaphore = signalSemaphore_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV && setSignalSemaphore( Semaphore signalSemaphore_ ) && VULKAN_HPP_NOEXCEPT { signalSemaphore = signalSemaphore_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV & setValue( uint64_t value_ ) & VULKAN_HPP_NOEXCEPT { value = value_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepInfoNV && setValue( uint64_t value_ ) && VULKAN_HPP_NOEXCEPT { value = value_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLatencySleepInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySleepInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySleepInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this 
// NOTE(review): generated code — tail of LatencySleepInfoNV (reflect(), defaulted <=> / fallback
// operator==, members, CppType), then most of the VkLatencySleepModeInfoNV wrapper (ctors, operator=,
// chainable setters). Angle-bracket template arguments were stripped during extraction (empty
// `std::tuple`, `reinterpret_cast( ... )`, `CppType`) — regenerate instead of hand-editing.
); } operator VkLatencySleepInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, signalSemaphore, value ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LatencySleepInfoNV const & ) const = default; #else bool operator==( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( signalSemaphore == rhs.signalSemaphore ) && ( value == rhs.value ); # endif } bool operator!=( LatencySleepInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLatencySleepInfoNV; const void * pNext = {}; Semaphore signalSemaphore = {}; uint64_t value = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LatencySleepInfoNV; }; #endif template <> struct CppType { using Type = LatencySleepInfoNV; }; // wrapper struct for struct VkLatencySleepModeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySleepModeInfoNV.html struct LatencySleepModeInfoNV { using NativeType = VkLatencySleepModeInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySleepModeInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( Bool32 lowLatencyMode_ = {}, Bool32 lowLatencyBoost_ = {}, uint32_t minimumIntervalUs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , lowLatencyMode{ lowLatencyMode_ } , lowLatencyBoost{ lowLatencyBoost_ } , minimumIntervalUs{ minimumIntervalUs_ } { } VULKAN_HPP_CONSTEXPR LatencySleepModeInfoNV( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; LatencySleepModeInfoNV( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySleepModeInfoNV( *reinterpret_cast( &rhs ) ) { } LatencySleepModeInfoNV & operator=( LatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LatencySleepModeInfoNV & operator=( VkLatencySleepModeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyMode( Bool32 lowLatencyMode_ ) & VULKAN_HPP_NOEXCEPT { lowLatencyMode = lowLatencyMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV && setLowLatencyMode( Bool32 lowLatencyMode_ ) && VULKAN_HPP_NOEXCEPT { lowLatencyMode = lowLatencyMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setLowLatencyBoost( Bool32 lowLatencyBoost_ ) & VULKAN_HPP_NOEXCEPT { lowLatencyBoost = lowLatencyBoost_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV && setLowLatencyBoost( Bool32 lowLatencyBoost_ ) && VULKAN_HPP_NOEXCEPT { lowLatencyBoost = lowLatencyBoost_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV & setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) & VULKAN_HPP_NOEXCEPT { minimumIntervalUs = minimumIntervalUs_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySleepModeInfoNV && setMinimumIntervalUs( uint32_t minimumIntervalUs_ ) && VULKAN_HPP_NOEXCEPT { minimumIntervalUs = minimumIntervalUs_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLatencySleepModeInfoNV const &() const VULKAN_HPP_NOEXCEPT { 
// NOTE(review): generated code — tail of LatencySleepModeInfoNV (Vk conversions, reflect(), <=> /
// operator== fallback, members, CppType), then most of the VkLatencySubmissionPresentIdNV wrapper.
// Angle-bracket template arguments were stripped during extraction — regenerate from the Vulkan XML
// registry rather than hand-editing.
return *reinterpret_cast( this ); } operator VkLatencySleepModeInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySleepModeInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLatencySleepModeInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, lowLatencyMode, lowLatencyBoost, minimumIntervalUs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LatencySleepModeInfoNV const & ) const = default; #else bool operator==( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lowLatencyMode == rhs.lowLatencyMode ) && ( lowLatencyBoost == rhs.lowLatencyBoost ) && ( minimumIntervalUs == rhs.minimumIntervalUs ); # endif } bool operator!=( LatencySleepModeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLatencySleepModeInfoNV; const void * pNext = {}; Bool32 lowLatencyMode = {}; Bool32 lowLatencyBoost = {}; uint32_t minimumIntervalUs = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LatencySleepModeInfoNV; }; #endif template <> struct CppType { using Type = LatencySleepModeInfoNV; }; // wrapper struct for struct VkLatencySubmissionPresentIdNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySubmissionPresentIdNV.html struct LatencySubmissionPresentIdNV { using NativeType = VkLatencySubmissionPresentIdNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySubmissionPresentIdNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentID{ presentID_ } { } VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; LatencySubmissionPresentIdNV( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySubmissionPresentIdNV( *reinterpret_cast( &rhs ) ) { } LatencySubmissionPresentIdNV & operator=( LatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LatencySubmissionPresentIdNV & operator=( VkLatencySubmissionPresentIdNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV & setPresentID( uint64_t presentID_ ) & VULKAN_HPP_NOEXCEPT { presentID = presentID_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySubmissionPresentIdNV && setPresentID( uint64_t presentID_ ) && VULKAN_HPP_NOEXCEPT { presentID = presentID_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLatencySubmissionPresentIdNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySubmissionPresentIdNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySubmissionPresentIdNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLatencySubmissionPresentIdNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple 
// NOTE(review): generated code — tail of LatencySubmissionPresentIdNV (reflect(), comparisons,
// members, CppType), then most of the VkLatencySurfaceCapabilitiesNV wrapper, including the
// enhanced-mode ArrayProxyNoTemporaries constructor/setter for the presentModes array. Angle-bracket
// template arguments were stripped during extraction — regenerate rather than hand-editing.
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentID ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LatencySubmissionPresentIdNV const & ) const = default; #else bool operator==( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentID == rhs.presentID ); # endif } bool operator!=( LatencySubmissionPresentIdNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLatencySubmissionPresentIdNV; const void * pNext = {}; uint64_t presentID = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LatencySubmissionPresentIdNV; }; #endif template <> struct CppType { using Type = LatencySubmissionPresentIdNV; }; // wrapper struct for struct VkLatencySurfaceCapabilitiesNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLatencySurfaceCapabilitiesNV.html struct LatencySurfaceCapabilitiesNV { using NativeType = VkLatencySurfaceCapabilitiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLatencySurfaceCapabilitiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentModeCount{ presentModeCount_ } , pPresentModes{ pPresentModes_ } { } VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; LatencySurfaceCapabilitiesNV( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : LatencySurfaceCapabilitiesNV( *reinterpret_cast( &rhs ) ) { } # if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) LatencySurfaceCapabilitiesNV( ArrayProxyNoTemporaries const & presentModes_, const void * pNext_ = nullptr ) : pNext( pNext_ ), presentModeCount( static_cast( presentModes_.size() ) ), pPresentModes( presentModes_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ LatencySurfaceCapabilitiesNV & operator=( LatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LatencySurfaceCapabilitiesNV & operator=( VkLatencySurfaceCapabilitiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPresentModeCount( uint32_t presentModeCount_ ) & VULKAN_HPP_NOEXCEPT { presentModeCount = presentModeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV && setPresentModeCount( uint32_t presentModeCount_ ) && VULKAN_HPP_NOEXCEPT { presentModeCount = presentModeCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV & setPPresentModes( PresentModeKHR * pPresentModes_ ) & VULKAN_HPP_NOEXCEPT { pPresentModes = pPresentModes_; return *this; } VULKAN_HPP_CONSTEXPR_14 LatencySurfaceCapabilitiesNV && setPPresentModes( PresentModeKHR * pPresentModes_ ) && VULKAN_HPP_NOEXCEPT { pPresentModes = pPresentModes_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) LatencySurfaceCapabilitiesNV & setPresentModes( ArrayProxyNoTemporaries const & presentModes_ ) VULKAN_HPP_NOEXCEPT { presentModeCount = static_cast( presentModes_.size() ); 
// NOTE(review): generated code — tail of LatencySurfaceCapabilitiesNV (Vk conversions, reflect(),
// comparisons — note operator== compares pPresentModes by pointer, not element-wise; that matches the
// upstream generator), CppType, then most of the VkLayerProperties wrapper whose layerName /
// description members are fixed-size char arrays (ArrayWrapper1D). Angle-bracket template arguments
// were stripped during extraction — regenerate rather than hand-editing.
pPresentModes = presentModes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLatencySurfaceCapabilitiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySurfaceCapabilitiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLatencySurfaceCapabilitiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLatencySurfaceCapabilitiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentModeCount, pPresentModes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LatencySurfaceCapabilitiesNV const & ) const = default; #else bool operator==( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeCount == rhs.presentModeCount ) && ( pPresentModes == rhs.pPresentModes ); # endif } bool operator!=( LatencySurfaceCapabilitiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLatencySurfaceCapabilitiesNV; const void * pNext = {}; uint32_t presentModeCount = {}; PresentModeKHR * pPresentModes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LatencySurfaceCapabilitiesNV; }; #endif template <> struct CppType { using Type = LatencySurfaceCapabilitiesNV; }; // wrapper struct for struct VkLayerProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerProperties.html struct LayerProperties { using NativeType = VkLayerProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 
LayerProperties( std::array const & layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT : layerName{ layerName_ } , specVersion{ specVersion_ } , implementationVersion{ implementationVersion_ } , description{ description_ } { } VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkLayerProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLayerProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std:: tuple const &, uint32_t const &, uint32_t const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( layerName, specVersion, implementationVersion, description ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = strcmp( layerName, rhs.layerName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = specVersion <=> rhs.specVersion; cmp != 0 ) return cmp; if ( auto cmp = implementationVersion <=> rhs.implementationVersion; cmp != 0 ) return cmp; if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) return ( cmp < 0 ) ? 
// NOTE(review): generated code — tail of LayerProperties (strcmp-based operator== / <=>, members,
// CppType), then the head of the VkLayerSettingEXT wrapper: its per-type enhanced-mode constructors
// each assert isSameType( type ) against the passed LayerSettingTypeEXT. The ArrayProxyNoTemporaries
// element types were lost with the stripped angle brackets — regenerate rather than hand-editing.
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && ( strcmp( description, rhs.description ) == 0 ); } bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: ArrayWrapper1D layerName = {}; uint32_t specVersion = {}; uint32_t implementationVersion = {}; ArrayWrapper1D description = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LayerProperties; }; #endif // wrapper struct for struct VkLayerSettingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingEXT.html struct LayerSettingEXT { using NativeType = VkLayerSettingEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LayerSettingEXT( const char * pLayerName_ = {}, const char * pSettingName_ = {}, LayerSettingTypeEXT type_ = LayerSettingTypeEXT::eBool32, uint32_t valueCount_ = {}, const void * pValues_ = {} ) VULKAN_HPP_NOEXCEPT : pLayerName{ pLayerName_ } , pSettingName{ pSettingName_ } , type{ type_ } , valueCount{ valueCount_ } , pValues{ pValues_ } { } VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) // NOTE: you need to provide the type because Bool32 and uint32_t are indistinguishable! 
LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( 
type ) ); } LayerSettingEXT( char const * pLayerName_, char const * pSettingName_, LayerSettingTypeEXT type_, ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { VULKAN_HPP_ASSERT( isSameType( type ) ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ LayerSettingEXT & operator=( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LayerSettingEXT & operator=( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPLayerName( const char * pLayerName_ ) & VULKAN_HPP_NOEXCEPT { pLayerName = pLayerName_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT && setPLayerName( const char * pLayerName_ ) && VULKAN_HPP_NOEXCEPT { pLayerName = pLayerName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPSettingName( const char * pSettingName_ ) & VULKAN_HPP_NOEXCEPT { pSettingName = pSettingName_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT && setPSettingName( const char * pSettingName_ ) && VULKAN_HPP_NOEXCEPT { pSettingName = pSettingName_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setType( LayerSettingTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT && setType( LayerSettingTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setValueCount( uint32_t valueCount_ ) & VULKAN_HPP_NOEXCEPT { valueCount = valueCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT && setValueCount( uint32_t valueCount_ ) && VULKAN_HPP_NOEXCEPT { valueCount = valueCount_; return std::move( *this ); } # if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } LayerSettingEXT & setValues( ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLayerSettingEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerSettingEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerSettingEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLayerSettingEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( pLayerName, pSettingName, type, valueCount, pValues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( 
LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( pLayerName != rhs.pLayerName ) if ( auto cmp = strcmp( pLayerName, rhs.pLayerName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( pSettingName != rhs.pSettingName ) if ( auto cmp = strcmp( pSettingName, rhs.pSettingName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = type <=> rhs.type; cmp != 0 ) return cmp; if ( auto cmp = valueCount <=> rhs.valueCount; cmp != 0 ) return cmp; if ( auto cmp = pValues <=> rhs.pValues; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) && ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) && ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues ); } bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: const char * pLayerName = {}; const char * pSettingName = {}; LayerSettingTypeEXT type = LayerSettingTypeEXT::eBool32; uint32_t valueCount = {}; const void * pValues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LayerSettingEXT; }; #endif // wrapper struct for struct VkLayerSettingsCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkLayerSettingsCreateInfoEXT.html struct LayerSettingsCreateInfoEXT { using NativeType = VkLayerSettingsCreateInfoEXT; static const bool allowDuplicate = true; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eLayerSettingsCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {}, const 
LayerSettingEXT * pSettings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , settingCount{ settingCount_ } , pSettings{ pSettings_ } { } VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; LayerSettingsCreateInfoEXT( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingsCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) LayerSettingsCreateInfoEXT( ArrayProxyNoTemporaries const & settings_, const void * pNext_ = nullptr ) : pNext( pNext_ ), settingCount( static_cast( settings_.size() ) ), pSettings( settings_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ LayerSettingsCreateInfoEXT & operator=( LayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ LayerSettingsCreateInfoEXT & operator=( VkLayerSettingsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setSettingCount( uint32_t settingCount_ ) & VULKAN_HPP_NOEXCEPT { settingCount = settingCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT && setSettingCount( uint32_t settingCount_ ) && VULKAN_HPP_NOEXCEPT { settingCount = settingCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT & setPSettings( const LayerSettingEXT * pSettings_ ) & VULKAN_HPP_NOEXCEPT { pSettings = pSettings_; return *this; } VULKAN_HPP_CONSTEXPR_14 LayerSettingsCreateInfoEXT && 
setPSettings( const LayerSettingEXT * pSettings_ ) && VULKAN_HPP_NOEXCEPT { pSettings = pSettings_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) LayerSettingsCreateInfoEXT & setSettings( ArrayProxyNoTemporaries const & settings_ ) VULKAN_HPP_NOEXCEPT { settingCount = static_cast( settings_.size() ); pSettings = settings_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkLayerSettingsCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerSettingsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkLayerSettingsCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkLayerSettingsCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, settingCount, pSettings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( LayerSettingsCreateInfoEXT const & ) const = default; #else bool operator==( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( settingCount == rhs.settingCount ) && ( pSettings == rhs.pSettings ); # endif } bool operator!=( LayerSettingsCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eLayerSettingsCreateInfoEXT; const void * pNext = {}; uint32_t settingCount = {}; const LayerSettingEXT * pSettings = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = LayerSettingsCreateInfoEXT; }; #endif template <> struct CppType { using Type = LayerSettingsCreateInfoEXT; }; #if defined( VK_USE_PLATFORM_MACOS_MVK ) // 
wrapper struct for struct VkMacOSSurfaceCreateInfoMVK, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMacOSSurfaceCreateInfoMVK.html struct MacOSSurfaceCreateInfoMVK { using NativeType = VkMacOSSurfaceCreateInfoMVK; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pView{ pView_ } { } VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT : MacOSSurfaceCreateInfoMVK( *reinterpret_cast( &rhs ) ) { } MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & setFlags( MacOSSurfaceCreateFlagsMVK flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK && setFlags( MacOSSurfaceCreateFlagsMVK flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
MacOSSurfaceCreateInfoMVK & setPView( const void * pView_ ) & VULKAN_HPP_NOEXCEPT { pView = pView_; return *this; } VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK && setPView( const void * pView_ ) && VULKAN_HPP_NOEXCEPT { pView = pView_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMacOSSurfaceCreateInfoMVK const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMacOSSurfaceCreateInfoMVK const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMacOSSurfaceCreateInfoMVK *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pView ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MacOSSurfaceCreateInfoMVK const & ) const = default; # else bool operator==( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); # endif } bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; const void * pNext = {}; MacOSSurfaceCreateFlagsMVK flags = {}; const void * pView = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MacOSSurfaceCreateInfoMVK; }; # endif template <> struct CppType { using Type = MacOSSurfaceCreateInfoMVK; }; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ // wrapper struct for struct VkMappedMemoryRange, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMappedMemoryRange.html struct MappedMemoryRange { using NativeType = 
VkMappedMemoryRange; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MappedMemoryRange( DeviceMemory memory_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT : MappedMemoryRange( *reinterpret_cast( &rhs ) ) {} MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & setSize( DeviceSize 
size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMappedMemoryRange const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMappedMemoryRange const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMappedMemoryRange *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MappedMemoryRange const & ) const = default; #else bool operator==( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( MappedMemoryRange const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMappedMemoryRange; const void * pNext = {}; DeviceMemory memory = {}; DeviceSize offset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MappedMemoryRange; }; #endif template <> struct CppType { using Type = MappedMemoryRange; }; // wrapper struct for struct VkMemoryAllocateFlagsInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateFlagsInfo.html struct MemoryAllocateFlagsInfo { using NativeType = VkMemoryAllocateFlagsInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eMemoryAllocateFlagsInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , deviceMask{ deviceMask_ } { } VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateFlagsInfo( *reinterpret_cast( &rhs ) ) { } MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setFlags( MemoryAllocateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo && setFlags( MemoryAllocateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) & VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo && setDeviceMask( uint32_t deviceMask_ ) && VULKAN_HPP_NOEXCEPT { deviceMask = deviceMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryAllocateFlagsInfo const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryAllocateFlagsInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryAllocateFlagsInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, deviceMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryAllocateFlagsInfo const & ) const = default; #else bool operator==( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( deviceMask == rhs.deviceMask ); # endif } bool operator!=( MemoryAllocateFlagsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryAllocateFlagsInfo; const void * pNext = {}; MemoryAllocateFlags flags = {}; uint32_t deviceMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryAllocateFlagsInfo; }; #endif template <> struct CppType { using Type = MemoryAllocateFlagsInfo; }; using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; // wrapper struct for struct VkMemoryAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryAllocateInfo.html struct MemoryAllocateInfo { using NativeType = VkMemoryAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , allocationSize{ allocationSize_ } , memoryTypeIndex{ memoryTypeIndex_ } { } VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryAllocateInfo( *reinterpret_cast( &rhs ) ) {} MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setAllocationSize( DeviceSize allocationSize_ ) & VULKAN_HPP_NOEXCEPT { allocationSize = allocationSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo && setAllocationSize( DeviceSize allocationSize_ ) && VULKAN_HPP_NOEXCEPT { allocationSize = allocationSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) & VULKAN_HPP_NOEXCEPT { memoryTypeIndex = memoryTypeIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo && setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) && VULKAN_HPP_NOEXCEPT { memoryTypeIndex = memoryTypeIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryAllocateInfo const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, allocationSize, memoryTypeIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryAllocateInfo const & ) const = default; #else bool operator==( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeIndex == rhs.memoryTypeIndex ); # endif } bool operator!=( MemoryAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryAllocateInfo; const void * pNext = {}; DeviceSize allocationSize = {}; uint32_t memoryTypeIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryAllocateInfo; }; #endif template <> struct CppType { using Type = MemoryAllocateInfo; }; // wrapper struct for struct VkMemoryBarrier, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrier.html struct MemoryBarrier { using NativeType = VkMemoryBarrier; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryBarrier( AccessFlags srcAccessMask_ = {}, AccessFlags dstAccessMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcAccessMask{ srcAccessMask_ } , dstAccessMask{ dstAccessMask_ } { } VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryBarrier( VkMemoryBarrier const & rhs ) 
VULKAN_HPP_NOEXCEPT : MemoryBarrier( *reinterpret_cast( &rhs ) ) {} MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setSrcAccessMask( AccessFlags srcAccessMask_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier && setSrcAccessMask( AccessFlags srcAccessMask_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask = srcAccessMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & setDstAccessMask( AccessFlags dstAccessMask_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrier && setDstAccessMask( AccessFlags dstAccessMask_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask = dstAccessMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryBarrier const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrier const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryBarrier *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcAccessMask, dstAccessMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryBarrier const & ) const 
= default; #else bool operator==( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask == rhs.srcAccessMask ) && ( dstAccessMask == rhs.dstAccessMask ); # endif } bool operator!=( MemoryBarrier const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryBarrier; const void * pNext = {}; AccessFlags srcAccessMask = {}; AccessFlags dstAccessMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryBarrier; }; #endif template <> struct CppType { using Type = MemoryBarrier; }; // wrapper struct for struct VkMemoryBarrierAccessFlags3KHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryBarrierAccessFlags3KHR.html struct MemoryBarrierAccessFlags3KHR { using NativeType = VkMemoryBarrierAccessFlags3KHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrierAccessFlags3KHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( AccessFlags3KHR srcAccessMask3_ = {}, AccessFlags3KHR dstAccessMask3_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , srcAccessMask3{ srcAccessMask3_ } , dstAccessMask3{ dstAccessMask3_ } { } VULKAN_HPP_CONSTEXPR MemoryBarrierAccessFlags3KHR( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryBarrierAccessFlags3KHR( VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryBarrierAccessFlags3KHR( *reinterpret_cast( &rhs ) ) { } MemoryBarrierAccessFlags3KHR & operator=( MemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryBarrierAccessFlags3KHR & operator=( 
VkMemoryBarrierAccessFlags3KHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setSrcAccessMask3( AccessFlags3KHR srcAccessMask3_ ) & VULKAN_HPP_NOEXCEPT { srcAccessMask3 = srcAccessMask3_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR && setSrcAccessMask3( AccessFlags3KHR srcAccessMask3_ ) && VULKAN_HPP_NOEXCEPT { srcAccessMask3 = srcAccessMask3_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR & setDstAccessMask3( AccessFlags3KHR dstAccessMask3_ ) & VULKAN_HPP_NOEXCEPT { dstAccessMask3 = dstAccessMask3_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryBarrierAccessFlags3KHR && setDstAccessMask3( AccessFlags3KHR dstAccessMask3_ ) && VULKAN_HPP_NOEXCEPT { dstAccessMask3 = dstAccessMask3_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryBarrierAccessFlags3KHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrierAccessFlags3KHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryBarrierAccessFlags3KHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryBarrierAccessFlags3KHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, srcAccessMask3, dstAccessMask3 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryBarrierAccessFlags3KHR const & ) const = 
default; #else bool operator==( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( srcAccessMask3 == rhs.srcAccessMask3 ) && ( dstAccessMask3 == rhs.dstAccessMask3 ); # endif } bool operator!=( MemoryBarrierAccessFlags3KHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryBarrierAccessFlags3KHR; const void * pNext = {}; AccessFlags3KHR srcAccessMask3 = {}; AccessFlags3KHR dstAccessMask3 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryBarrierAccessFlags3KHR; }; #endif template <> struct CppType { using Type = MemoryBarrierAccessFlags3KHR; }; // wrapper struct for struct VkMemoryDedicatedAllocateInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfo.html struct MemoryDedicatedAllocateInfo { using NativeType = VkMemoryDedicatedAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( Image image_ = {}, Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , buffer{ buffer_ } { } VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryDedicatedAllocateInfo( *reinterpret_cast( &rhs ) ) { } MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryDedicatedAllocateInfo & 
operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setImage( Image image_ ) & VULKAN_HPP_NOEXCEPT { image = image_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo && setImage( Image image_ ) && VULKAN_HPP_NOEXCEPT { image = image_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryDedicatedAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image, buffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryDedicatedAllocateInfo const & ) const = default; #else bool operator==( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image == rhs.image ) && ( buffer == rhs.buffer ); # endif } bool operator!=( MemoryDedicatedAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; const void * pNext = {}; Image image = {}; Buffer buffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryDedicatedAllocateInfo; }; #endif template <> struct CppType { using Type = MemoryDedicatedAllocateInfo; }; using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; // wrapper struct for struct VkMemoryDedicatedAllocateInfoTensorARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedAllocateInfoTensorARM.html struct MemoryDedicatedAllocateInfoTensorARM { using NativeType = VkMemoryDedicatedAllocateInfoTensorARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfoTensorARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM( TensorARM tensor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tensor{ tensor_ } { } VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfoTensorARM( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedAllocateInfoTensorARM( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryDedicatedAllocateInfoTensorARM( *reinterpret_cast( &rhs ) ) { } MemoryDedicatedAllocateInfoTensorARM & operator=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryDedicatedAllocateInfoTensorARM & operator=( VkMemoryDedicatedAllocateInfoTensorARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM & setTensor( TensorARM tensor_ ) & VULKAN_HPP_NOEXCEPT { tensor = tensor_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfoTensorARM && setTensor( TensorARM tensor_ ) && VULKAN_HPP_NOEXCEPT { tensor = tensor_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryDedicatedAllocateInfoTensorARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfoTensorARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfoTensorARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryDedicatedAllocateInfoTensorARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tensor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryDedicatedAllocateInfoTensorARM const & ) const = default; #else bool operator==( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensor == rhs.tensor ); # endif } bool operator!=( MemoryDedicatedAllocateInfoTensorARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::eMemoryDedicatedAllocateInfoTensorARM; const void * pNext = {}; TensorARM tensor = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryDedicatedAllocateInfoTensorARM; }; #endif template <> struct CppType { using Type = MemoryDedicatedAllocateInfoTensorARM; }; // wrapper struct for struct VkMemoryDedicatedRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryDedicatedRequirements.html struct MemoryDedicatedRequirements { using NativeType = VkMemoryDedicatedRequirements; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( Bool32 prefersDedicatedAllocation_ = {}, Bool32 requiresDedicatedAllocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , prefersDedicatedAllocation{ prefersDedicatedAllocation_ } , requiresDedicatedAllocation{ requiresDedicatedAllocation_ } { } VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryDedicatedRequirements( *reinterpret_cast( &rhs ) ) { } MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryDedicatedRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryDedicatedRequirements const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryDedicatedRequirements *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, prefersDedicatedAllocation, requiresDedicatedAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryDedicatedRequirements const & ) const = default; #else bool operator==( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); # endif } bool operator!=( MemoryDedicatedRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryDedicatedRequirements; void * pNext = {}; Bool32 prefersDedicatedAllocation = {}; Bool32 requiresDedicatedAllocation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryDedicatedRequirements; }; #endif template <> struct CppType { using Type = MemoryDedicatedRequirements; }; using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; // wrapper struct for struct VkMemoryFdPropertiesKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryFdPropertiesKHR.html struct MemoryFdPropertiesKHR { using NativeType = VkMemoryFdPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ 
pNext_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryFdPropertiesKHR( *reinterpret_cast( &rhs ) ) { } MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryFdPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryFdPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryFdPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryFdPropertiesKHR const & ) const = default; #else bool operator==( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryFdPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryFdPropertiesKHR; void * pNext = {}; uint32_t memoryTypeBits = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryFdPropertiesKHR; }; #endif template <> struct CppType { using Type = MemoryFdPropertiesKHR; }; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) // wrapper 
struct for struct VkMemoryGetAndroidHardwareBufferInfoANDROID, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html struct MemoryGetAndroidHardwareBufferInfoANDROID { using NativeType = VkMemoryGetAndroidHardwareBufferInfoANDROID; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } { } VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast( &rhs ) ) { } MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = 
memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetAndroidHardwareBufferInfoANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetAndroidHardwareBufferInfoANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetAndroidHardwareBufferInfoANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const & ) const = default; # else bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); # endif } bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; const void * pNext = {}; DeviceMemory memory = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetAndroidHardwareBufferInfoANDROID; }; # endif template <> struct CppType { using Type = MemoryGetAndroidHardwareBufferInfoANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ // wrapper struct for struct VkMemoryGetFdInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetFdInfoKHR.html struct MemoryGetFdInfoKHR { using 
NativeType = VkMemoryGetFdInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( DeviceMemory memory_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetFdInfoKHR( *reinterpret_cast( &rhs ) ) {} MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && 
VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetFdInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetFdInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetFdInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetFdInfoKHR const & ) const = default; #else bool operator==( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # endif } bool operator!=( MemoryGetFdInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryGetFdInfoKHR; const void * pNext = {}; DeviceMemory memory = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetFdInfoKHR; }; #endif template <> struct CppType { using Type = MemoryGetFdInfoKHR; }; #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkMemoryGetMetalHandleInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetMetalHandleInfoEXT.html struct MemoryGetMetalHandleInfoEXT { using NativeType = VkMemoryGetMetalHandleInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetMetalHandleInfoEXT; # 
if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( DeviceMemory memory_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR MemoryGetMetalHandleInfoEXT( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetMetalHandleInfoEXT( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetMetalHandleInfoEXT( *reinterpret_cast( &rhs ) ) { } MemoryGetMetalHandleInfoEXT & operator=( MemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetMetalHandleInfoEXT & operator=( VkMemoryGetMetalHandleInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetMetalHandleInfoEXT && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = 
handleType_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetMetalHandleInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetMetalHandleInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetMetalHandleInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetMetalHandleInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, handleType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetMetalHandleInfoEXT const & ) const = default; # else bool operator==( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # endif } bool operator!=( MemoryGetMetalHandleInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryGetMetalHandleInfoEXT; const void * pNext = {}; DeviceMemory memory = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetMetalHandleInfoEXT; }; # endif template <> struct CppType { using Type = MemoryGetMetalHandleInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkMemoryGetNativeBufferInfoOHOS, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetNativeBufferInfoOHOS.html struct MemoryGetNativeBufferInfoOHOS { using NativeType = VkMemoryGetNativeBufferInfoOHOS; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetNativeBufferInfoOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetNativeBufferInfoOHOS( DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } { } VULKAN_HPP_CONSTEXPR MemoryGetNativeBufferInfoOHOS( MemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetNativeBufferInfoOHOS( VkMemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetNativeBufferInfoOHOS( *reinterpret_cast( &rhs ) ) { } MemoryGetNativeBufferInfoOHOS & operator=( MemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetNativeBufferInfoOHOS & operator=( VkMemoryGetNativeBufferInfoOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetNativeBufferInfoOHOS && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetNativeBufferInfoOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetNativeBufferInfoOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetNativeBufferInfoOHOS const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetNativeBufferInfoOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetNativeBufferInfoOHOS const & ) const = default; # else bool operator==( MemoryGetNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ); # endif } bool operator!=( MemoryGetNativeBufferInfoOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryGetNativeBufferInfoOHOS; const void * pNext = {}; DeviceMemory memory = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetNativeBufferInfoOHOS; }; # endif template <> struct CppType { using Type = MemoryGetNativeBufferInfoOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ // wrapper struct for struct VkMemoryGetRemoteAddressInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetRemoteAddressInfoNV.html struct MemoryGetRemoteAddressInfoNV { using NativeType = VkMemoryGetRemoteAddressInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetRemoteAddressInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetRemoteAddressInfoNV( DeviceMemory memory_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR 
MemoryGetRemoteAddressInfoNV( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetRemoteAddressInfoNV( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetRemoteAddressInfoNV( *reinterpret_cast( &rhs ) ) { } MemoryGetRemoteAddressInfoNV & operator=( MemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetRemoteAddressInfoNV & operator=( VkMemoryGetRemoteAddressInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetRemoteAddressInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetRemoteAddressInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetRemoteAddressInfoNV const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkMemoryGetRemoteAddressInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetRemoteAddressInfoNV const & ) const = default; #else bool operator==( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # endif } bool operator!=( MemoryGetRemoteAddressInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryGetRemoteAddressInfoNV; const void * pNext = {}; DeviceMemory memory = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetRemoteAddressInfoNV; }; #endif template <> struct CppType { using Type = MemoryGetRemoteAddressInfoNV; }; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkMemoryGetWin32HandleInfoKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetWin32HandleInfoKHR.html struct MemoryGetWin32HandleInfoKHR { using NativeType = VkMemoryGetWin32HandleInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( DeviceMemory memory_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetWin32HandleInfoKHR( *reinterpret_cast( &rhs ) ) { } MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryGetWin32HandleInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkMemoryGetWin32HandleInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetWin32HandleInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, handleType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetWin32HandleInfoKHR const & ) const = default; # else bool operator==( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # endif } bool operator!=( MemoryGetWin32HandleInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; const void * pNext = {}; DeviceMemory memory = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetWin32HandleInfoKHR; }; # endif template <> struct CppType { using Type = MemoryGetWin32HandleInfoKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkMemoryGetZirconHandleInfoFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryGetZirconHandleInfoFUCHSIA.html struct MemoryGetZirconHandleInfoFUCHSIA { using NativeType = VkMemoryGetZirconHandleInfoFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( DeviceMemory 
memory_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memory{ memory_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR MemoryGetZirconHandleInfoFUCHSIA( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryGetZirconHandleInfoFUCHSIA( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryGetZirconHandleInfoFUCHSIA( *reinterpret_cast( &rhs ) ) { } MemoryGetZirconHandleInfoFUCHSIA & operator=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryGetZirconHandleInfoFUCHSIA & operator=( VkMemoryGetZirconHandleInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryGetZirconHandleInfoFUCHSIA && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ 
operator VkMemoryGetZirconHandleInfoFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetZirconHandleInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryGetZirconHandleInfoFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryGetZirconHandleInfoFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memory, handleType ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryGetZirconHandleInfoFUCHSIA const & ) const = default; # else bool operator==( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memory == rhs.memory ) && ( handleType == rhs.handleType ); # endif } bool operator!=( MemoryGetZirconHandleInfoFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryGetZirconHandleInfoFUCHSIA; const void * pNext = {}; DeviceMemory memory = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryGetZirconHandleInfoFUCHSIA; }; # endif template <> struct CppType { using Type = MemoryGetZirconHandleInfoFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ // wrapper struct for struct VkMemoryHeap, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHeap.html struct MemoryHeap { using NativeType = VkMemoryHeap; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryHeap( DeviceSize size_ = {}, MemoryHeapFlags flags_ = {} ) 
VULKAN_HPP_NOEXCEPT : size{ size_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHeap( *reinterpret_cast( &rhs ) ) {} MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryHeap const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryHeap const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryHeap *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( size, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryHeap const & ) const = default; #else bool operator==( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( size == rhs.size ) && ( flags == rhs.flags ); # endif } bool operator!=( MemoryHeap const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize size = {}; MemoryHeapFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryHeap; }; #endif // wrapper struct for struct VkMemoryHostPointerPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryHostPointerPropertiesEXT.html struct MemoryHostPointerPropertiesEXT { using NativeType = VkMemoryHostPointerPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT; #if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryHostPointerPropertiesEXT( *reinterpret_cast( &rhs ) ) { } MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryHostPointerPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryHostPointerPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryHostPointerPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryHostPointerPropertiesEXT const & ) const = default; #else bool operator==( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryHostPointerPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; void * pNext = {}; uint32_t memoryTypeBits = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryHostPointerPropertiesEXT; }; #endif template <> struct CppType { using Type = MemoryHostPointerPropertiesEXT; }; // wrapper struct for struct VkMemoryMapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapInfo.html struct MemoryMapInfo { using NativeType = VkMemoryMapInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapFlags flags_ = {}, DeviceMemory memory_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , memory{ memory_ } , offset{ offset_ } , size{ size_ } { } VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfo( *reinterpret_cast( &rhs ) ) {} MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( MemoryMapFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo && setFlags( MemoryMapFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMapInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryMapInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, memory, offset, size ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryMapInfo const & ) const = default; #else bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ) && ( offset == rhs.offset ) && ( size == rhs.size ); # endif } bool operator!=( 
MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryMapInfo; const void * pNext = {}; MemoryMapFlags flags = {}; DeviceMemory memory = {}; DeviceSize offset = {}; DeviceSize size = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryMapInfo; }; #endif template <> struct CppType { using Type = MemoryMapInfo; }; using MemoryMapInfoKHR = MemoryMapInfo; // wrapper struct for struct VkMemoryMapPlacedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMapPlacedInfoEXT.html struct MemoryMapPlacedInfoEXT { using NativeType = VkMemoryMapPlacedInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapPlacedInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pPlacedAddress{ pPlacedAddress_ } { } VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryMapPlacedInfoEXT( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapPlacedInfoEXT( *reinterpret_cast( &rhs ) ) { } MemoryMapPlacedInfoEXT & operator=( MemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryMapPlacedInfoEXT & operator=( VkMemoryMapPlacedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT & setPPlacedAddress( void * pPlacedAddress_ ) & VULKAN_HPP_NOEXCEPT { pPlacedAddress = pPlacedAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryMapPlacedInfoEXT && setPPlacedAddress( void * pPlacedAddress_ ) && VULKAN_HPP_NOEXCEPT { pPlacedAddress = pPlacedAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryMapPlacedInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMapPlacedInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMapPlacedInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryMapPlacedInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pPlacedAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryMapPlacedInfoEXT const & ) const = default; #else bool operator==( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pPlacedAddress == rhs.pPlacedAddress ); # endif } bool operator!=( MemoryMapPlacedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryMapPlacedInfoEXT; const void * pNext = {}; void * pPlacedAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryMapPlacedInfoEXT; }; #endif template <> struct CppType { using Type = MemoryMapPlacedInfoEXT; }; #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkMemoryMetalHandlePropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryMetalHandlePropertiesEXT.html struct MemoryMetalHandlePropertiesEXT { using NativeType = VkMemoryMetalHandlePropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMetalHandlePropertiesEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryMetalHandlePropertiesEXT( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryMetalHandlePropertiesEXT( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMetalHandlePropertiesEXT( *reinterpret_cast( &rhs ) ) { } MemoryMetalHandlePropertiesEXT & operator=( MemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryMetalHandlePropertiesEXT & operator=( VkMemoryMetalHandlePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryMetalHandlePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMetalHandlePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryMetalHandlePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryMetalHandlePropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryMetalHandlePropertiesEXT const & ) const = default; # else bool operator==( 
MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryMetalHandlePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryMetalHandlePropertiesEXT; void * pNext = {}; uint32_t memoryTypeBits = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryMetalHandlePropertiesEXT; }; # endif template <> struct CppType { using Type = MemoryMetalHandlePropertiesEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ // wrapper struct for struct VkMemoryOpaqueCaptureAddressAllocateInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryOpaqueCaptureAddressAllocateInfo.html struct MemoryOpaqueCaptureAddressAllocateInfo { using NativeType = VkMemoryOpaqueCaptureAddressAllocateInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , opaqueCaptureAddress{ opaqueCaptureAddress_ } { } VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast( &rhs ) ) { } MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) & VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo && setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) && VULKAN_HPP_NOEXCEPT { opaqueCaptureAddress = opaqueCaptureAddress_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryOpaqueCaptureAddressAllocateInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryOpaqueCaptureAddressAllocateInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryOpaqueCaptureAddressAllocateInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, opaqueCaptureAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const & ) const = default; #else bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress ); # endif } bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo; const void * pNext = {}; uint64_t opaqueCaptureAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryOpaqueCaptureAddressAllocateInfo; }; #endif template <> struct CppType { using Type = MemoryOpaqueCaptureAddressAllocateInfo; }; using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo; // wrapper struct for struct VkMemoryPriorityAllocateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryPriorityAllocateInfoEXT.html struct MemoryPriorityAllocateInfoEXT { using NativeType = VkMemoryPriorityAllocateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , priority{ priority_ } { } VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryPriorityAllocateInfoEXT( *reinterpret_cast( &rhs ) ) { } MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) & VULKAN_HPP_NOEXCEPT { priority = priority_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT && setPriority( float priority_ ) && VULKAN_HPP_NOEXCEPT { priority = priority_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryPriorityAllocateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryPriorityAllocateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryPriorityAllocateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, priority ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryPriorityAllocateInfoEXT const & ) const = default; #else bool operator==( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priority == rhs.priority ); # endif } bool operator!=( MemoryPriorityAllocateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; const void * pNext = {}; float priority = {}; 
}; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryPriorityAllocateInfoEXT; }; #endif template <> struct CppType { using Type = MemoryPriorityAllocateInfoEXT; }; // wrapper struct for struct VkMemoryRequirements, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements.html struct MemoryRequirements { using NativeType = VkMemoryRequirements; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryRequirements( DeviceSize size_ = {}, DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT : size{ size_ } , alignment{ alignment_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements( *reinterpret_cast( &rhs ) ) {} MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryRequirements const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryRequirements const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryRequirements *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( size, alignment, memoryTypeBits ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryRequirements const & ) const = default; #else bool operator==( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT 
) return this->reflect() == rhs.reflect(); # else return ( size == rhs.size ) && ( alignment == rhs.alignment ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryRequirements const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: DeviceSize size = {}; DeviceSize alignment = {}; uint32_t memoryTypeBits = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryRequirements; }; #endif // wrapper struct for struct VkMemoryRequirements2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryRequirements2.html struct MemoryRequirements2 { using NativeType = VkMemoryRequirements2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryRequirements{ memoryRequirements_ } { } VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryRequirements2( *reinterpret_cast( &rhs ) ) { } MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryRequirements2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryRequirements2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryRequirements2 *() VULKAN_HPP_NOEXCEPT 
{ return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryRequirements ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryRequirements2 const & ) const = default; #else bool operator==( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryRequirements == rhs.memoryRequirements ); # endif } bool operator!=( MemoryRequirements2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryRequirements2; void * pNext = {}; MemoryRequirements memoryRequirements = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryRequirements2; }; #endif template <> struct CppType { using Type = MemoryRequirements2; }; using MemoryRequirements2KHR = MemoryRequirements2; // wrapper struct for struct VkMemoryType, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryType.html struct MemoryType { using NativeType = VkMemoryType; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryType( MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT : propertyFlags{ propertyFlags_ } , heapIndex{ heapIndex_ } { } VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryType( *reinterpret_cast( &rhs ) ) {} MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryType const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryType &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryType const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryType *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( propertyFlags, heapIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryType const & ) const = default; #else bool operator==( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( propertyFlags == rhs.propertyFlags ) && ( heapIndex == rhs.heapIndex ); # endif } bool operator!=( MemoryType const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: MemoryPropertyFlags propertyFlags = {}; uint32_t heapIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryType; }; #endif // wrapper struct for struct VkMemoryUnmapInfo, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryUnmapInfo.html struct MemoryUnmapInfo { using NativeType = VkMemoryUnmapInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapFlags flags_ = {}, DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , memory{ memory_ } { } VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfo( *reinterpret_cast( &rhs ) ) {} MemoryUnmapInfo & operator=( MemoryUnmapInfo 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( MemoryUnmapFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo && setFlags( MemoryUnmapFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( DeviceMemory memory_ ) & VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo && setMemory( DeviceMemory memory_ ) && VULKAN_HPP_NOEXCEPT { memory = memory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryUnmapInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryUnmapInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, memory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryUnmapInfo const & ) const = default; #else bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == 
rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( memory == rhs.memory ); # endif } bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMemoryUnmapInfo; const void * pNext = {}; MemoryUnmapFlags flags = {}; DeviceMemory memory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryUnmapInfo; }; #endif template <> struct CppType { using Type = MemoryUnmapInfo; }; using MemoryUnmapInfoKHR = MemoryUnmapInfo; #if defined( VK_USE_PLATFORM_WIN32_KHR ) // wrapper struct for struct VkMemoryWin32HandlePropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryWin32HandlePropertiesKHR.html struct MemoryWin32HandlePropertiesKHR { using NativeType = VkMemoryWin32HandlePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryWin32HandlePropertiesKHR( *reinterpret_cast( &rhs ) ) { } MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryWin32HandlePropertiesKHR const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryWin32HandlePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryWin32HandlePropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryWin32HandlePropertiesKHR const & ) const = default; # else bool operator==( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryWin32HandlePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR; void * pNext = {}; uint32_t memoryTypeBits = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryWin32HandlePropertiesKHR; }; # endif template <> struct CppType { using Type = MemoryWin32HandlePropertiesKHR; }; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) // wrapper struct for struct VkMemoryZirconHandlePropertiesFUCHSIA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMemoryZirconHandlePropertiesFUCHSIA.html struct MemoryZirconHandlePropertiesFUCHSIA { using NativeType = VkMemoryZirconHandlePropertiesFUCHSIA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; MemoryZirconHandlePropertiesFUCHSIA( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryZirconHandlePropertiesFUCHSIA( *reinterpret_cast( &rhs ) ) { } MemoryZirconHandlePropertiesFUCHSIA & operator=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MemoryZirconHandlePropertiesFUCHSIA & operator=( VkMemoryZirconHandlePropertiesFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMemoryZirconHandlePropertiesFUCHSIA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryZirconHandlePropertiesFUCHSIA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMemoryZirconHandlePropertiesFUCHSIA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMemoryZirconHandlePropertiesFUCHSIA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryTypeBits ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MemoryZirconHandlePropertiesFUCHSIA const & ) const = default; # else bool operator==( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( MemoryZirconHandlePropertiesFUCHSIA const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eMemoryZirconHandlePropertiesFUCHSIA; void * pNext = {}; uint32_t memoryTypeBits = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MemoryZirconHandlePropertiesFUCHSIA; }; # endif template <> struct CppType { using Type = MemoryZirconHandlePropertiesFUCHSIA; }; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) // wrapper struct for struct VkMetalSurfaceCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMetalSurfaceCreateInfoEXT.html struct MetalSurfaceCreateInfoEXT { using NativeType = VkMetalSurfaceCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pLayer{ pLayer_ } { } VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setFlags( MetalSurfaceCreateFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT && setFlags( MetalSurfaceCreateFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer * pLayer_ ) & VULKAN_HPP_NOEXCEPT { pLayer = pLayer_; return *this; } VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT && setPLayer( const CAMetalLayer * pLayer_ ) && VULKAN_HPP_NOEXCEPT { pLayer = pLayer_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMetalSurfaceCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMetalSurfaceCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMetalSurfaceCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pLayer ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MetalSurfaceCreateInfoEXT const & ) const = default; # else bool operator==( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pLayer == rhs.pLayer ); # endif } bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = 
StructureType::eMetalSurfaceCreateInfoEXT; const void * pNext = {}; MetalSurfaceCreateFlagsEXT flags = {}; const CAMetalLayer * pLayer = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MetalSurfaceCreateInfoEXT; }; # endif template <> struct CppType { using Type = MetalSurfaceCreateInfoEXT; }; #endif /*VK_USE_PLATFORM_METAL_EXT*/ // wrapper struct for struct VkMicromapBuildInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildInfoEXT.html struct MicromapBuildInfoEXT { using NativeType = VkMicromapBuildInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapTypeEXT type_ = MicromapTypeEXT::eOpacityMicromap, BuildMicromapFlagsEXT flags_ = {}, BuildMicromapModeEXT mode_ = BuildMicromapModeEXT::eBuild, MicromapEXT dstMicromap_ = {}, uint32_t usageCountsCount_ = {}, const MicromapUsageEXT * pUsageCounts_ = {}, const MicromapUsageEXT * const * ppUsageCounts_ = {}, DeviceOrHostAddressConstKHR data_ = {}, DeviceOrHostAddressKHR scratchData_ = {}, DeviceOrHostAddressConstKHR triangleArray_ = {}, DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , flags{ flags_ } , mode{ mode_ } , dstMicromap{ dstMicromap_ } , usageCountsCount{ usageCountsCount_ } , pUsageCounts{ pUsageCounts_ } , ppUsageCounts{ ppUsageCounts_ } , data{ data_ } , scratchData{ scratchData_ } , triangleArray{ triangleArray_ } , triangleArrayStride{ triangleArrayStride_ } { } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapBuildInfoEXT( *reinterpret_cast( &rhs ) ) { } # if 
!defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MicromapBuildInfoEXT( MicromapTypeEXT type_, BuildMicromapFlagsEXT flags_, BuildMicromapModeEXT mode_, MicromapEXT dstMicromap_, ArrayProxyNoTemporaries const & usageCounts_, ArrayProxyNoTemporaries const & pUsageCounts_ = {}, DeviceOrHostAddressConstKHR data_ = {}, DeviceOrHostAddressKHR scratchData_ = {}, DeviceOrHostAddressConstKHR triangleArray_ = {}, DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr ) : pNext( pNext_ ) , type( type_ ) , flags( flags_ ) , mode( mode_ ) , dstMicromap( dstMicromap_ ) , usageCountsCount( static_cast( !usageCounts_.empty() ? usageCounts_.size() : pUsageCounts_.size() ) ) , pUsageCounts( usageCounts_.data() ) , ppUsageCounts( pUsageCounts_.data() ) , data( data_ ) , scratchData( scratchData_ ) , triangleArray( triangleArray_ ) , triangleArrayStride( triangleArrayStride_ ) { # ifdef VULKAN_HPP_NO_EXCEPTIONS VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); # else if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) { throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); } # endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ MicromapBuildInfoEXT & operator=( MicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapBuildInfoEXT & operator=( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & 
setType( MicromapTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setType( MicromapTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setFlags( BuildMicromapFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setFlags( BuildMicromapFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setMode( BuildMicromapModeEXT mode_ ) & VULKAN_HPP_NOEXCEPT { mode = mode_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setMode( BuildMicromapModeEXT mode_ ) && VULKAN_HPP_NOEXCEPT { mode = mode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setDstMicromap( MicromapEXT dstMicromap_ ) & VULKAN_HPP_NOEXCEPT { dstMicromap = dstMicromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setDstMicromap( MicromapEXT dstMicromap_ ) && VULKAN_HPP_NOEXCEPT { dstMicromap = dstMicromap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setUsageCountsCount( uint32_t usageCountsCount_ ) & VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setUsageCountsCount( uint32_t usageCountsCount_ ) && VULKAN_HPP_NOEXCEPT { usageCountsCount = usageCountsCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setPUsageCounts( const MicromapUsageEXT * pUsageCounts_ ) && VULKAN_HPP_NOEXCEPT { pUsageCounts = pUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MicromapBuildInfoEXT & setUsageCounts( 
ArrayProxyNoTemporaries const & usageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( usageCounts_.size() ); pUsageCounts = usageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) & VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setPpUsageCounts( const MicromapUsageEXT * const * ppUsageCounts_ ) && VULKAN_HPP_NOEXCEPT { ppUsageCounts = ppUsageCounts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MicromapBuildInfoEXT & setPUsageCounts( ArrayProxyNoTemporaries const & pUsageCounts_ ) VULKAN_HPP_NOEXCEPT { usageCountsCount = static_cast( pUsageCounts_.size() ); ppUsageCounts = pUsageCounts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setData( DeviceOrHostAddressConstKHR const & data_ ) & VULKAN_HPP_NOEXCEPT { data = data_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setData( DeviceOrHostAddressConstKHR const & data_ ) && VULKAN_HPP_NOEXCEPT { data = data_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) & VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setScratchData( DeviceOrHostAddressKHR const & scratchData_ ) && VULKAN_HPP_NOEXCEPT { scratchData = scratchData_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArray( DeviceOrHostAddressConstKHR const & triangleArray_ ) & VULKAN_HPP_NOEXCEPT { triangleArray = triangleArray_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setTriangleArray( DeviceOrHostAddressConstKHR const & triangleArray_ ) && VULKAN_HPP_NOEXCEPT { triangleArray = triangleArray_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setTriangleArrayStride( DeviceSize triangleArrayStride_ ) & VULKAN_HPP_NOEXCEPT { triangleArrayStride = triangleArrayStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT && setTriangleArrayStride( DeviceSize triangleArrayStride_ ) && VULKAN_HPP_NOEXCEPT { triangleArrayStride = triangleArrayStride_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapBuildInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapBuildInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapBuildInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapBuildInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, flags, mode, dstMicromap, usageCountsCount, pUsageCounts, ppUsageCounts, data, scratchData, triangleArray, triangleArrayStride ); } #endif public: StructureType sType = StructureType::eMicromapBuildInfoEXT; const void * pNext = {}; MicromapTypeEXT type = MicromapTypeEXT::eOpacityMicromap; BuildMicromapFlagsEXT flags = {}; BuildMicromapModeEXT mode = BuildMicromapModeEXT::eBuild; MicromapEXT dstMicromap = {}; uint32_t usageCountsCount = {}; const MicromapUsageEXT * pUsageCounts = {}; const MicromapUsageEXT * const * ppUsageCounts = {}; DeviceOrHostAddressConstKHR data = {}; DeviceOrHostAddressKHR scratchData = {}; DeviceOrHostAddressConstKHR triangleArray = {}; DeviceSize triangleArrayStride = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapBuildInfoEXT; }; #endif template <> struct CppType { using Type = MicromapBuildInfoEXT; }; // wrapper struct for struct VkMicromapBuildSizesInfoEXT, see 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapBuildSizesInfoEXT.html struct MicromapBuildSizesInfoEXT { using NativeType = VkMicromapBuildSizesInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapBuildSizesInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( DeviceSize micromapSize_ = {}, DeviceSize buildScratchSize_ = {}, Bool32 discardable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , micromapSize{ micromapSize_ } , buildScratchSize{ buildScratchSize_ } , discardable{ discardable_ } { } VULKAN_HPP_CONSTEXPR MicromapBuildSizesInfoEXT( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapBuildSizesInfoEXT( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapBuildSizesInfoEXT( *reinterpret_cast( &rhs ) ) { } MicromapBuildSizesInfoEXT & operator=( MicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapBuildSizesInfoEXT & operator=( VkMicromapBuildSizesInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setMicromapSize( DeviceSize micromapSize_ ) & VULKAN_HPP_NOEXCEPT { micromapSize = micromapSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT && setMicromapSize( DeviceSize micromapSize_ ) && VULKAN_HPP_NOEXCEPT { micromapSize = micromapSize_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setBuildScratchSize( DeviceSize buildScratchSize_ ) & VULKAN_HPP_NOEXCEPT { buildScratchSize = buildScratchSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT && setBuildScratchSize( DeviceSize buildScratchSize_ ) && VULKAN_HPP_NOEXCEPT { buildScratchSize = buildScratchSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT & setDiscardable( Bool32 discardable_ ) & VULKAN_HPP_NOEXCEPT { discardable = discardable_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildSizesInfoEXT && setDiscardable( Bool32 discardable_ ) && VULKAN_HPP_NOEXCEPT { discardable = discardable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapBuildSizesInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapBuildSizesInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapBuildSizesInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapBuildSizesInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, micromapSize, buildScratchSize, discardable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MicromapBuildSizesInfoEXT const & ) const = default; #else bool operator==( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromapSize == rhs.micromapSize ) && ( buildScratchSize == rhs.buildScratchSize ) && ( discardable == rhs.discardable ); # endif } bool operator!=( MicromapBuildSizesInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType 
sType = StructureType::eMicromapBuildSizesInfoEXT; const void * pNext = {}; DeviceSize micromapSize = {}; DeviceSize buildScratchSize = {}; Bool32 discardable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapBuildSizesInfoEXT; }; #endif template <> struct CppType { using Type = MicromapBuildSizesInfoEXT; }; // wrapper struct for struct VkMicromapCreateInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapCreateInfoEXT.html struct MicromapCreateInfoEXT { using NativeType = VkMicromapCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateFlagsEXT createFlags_ = {}, Buffer buffer_ = {}, DeviceSize offset_ = {}, DeviceSize size_ = {}, MicromapTypeEXT type_ = MicromapTypeEXT::eOpacityMicromap, DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , createFlags{ createFlags_ } , buffer{ buffer_ } , offset{ offset_ } , size{ size_ } , type{ type_ } , deviceAddress{ deviceAddress_ } { } VULKAN_HPP_CONSTEXPR MicromapCreateInfoEXT( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapCreateInfoEXT( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } MicromapCreateInfoEXT & operator=( MicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapCreateInfoEXT & operator=( VkMicromapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setCreateFlags( MicromapCreateFlagsEXT createFlags_ ) & VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setCreateFlags( MicromapCreateFlagsEXT createFlags_ ) && VULKAN_HPP_NOEXCEPT { createFlags = createFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setBuffer( Buffer buffer_ ) & VULKAN_HPP_NOEXCEPT { buffer = buffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setBuffer( Buffer buffer_ ) && VULKAN_HPP_NOEXCEPT { buffer = buffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setOffset( DeviceSize offset_ ) & VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setOffset( DeviceSize offset_ ) && VULKAN_HPP_NOEXCEPT { offset = offset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setSize( DeviceSize size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setSize( DeviceSize size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setType( MicromapTypeEXT type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setType( MicromapTypeEXT type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setDeviceAddress( DeviceAddress deviceAddress_ ) & VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT && setDeviceAddress( DeviceAddress deviceAddress_ ) && VULKAN_HPP_NOEXCEPT { deviceAddress = deviceAddress_; return 
std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, createFlags, buffer, offset, size, type, deviceAddress ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MicromapCreateInfoEXT const & ) const = default; #else bool operator==( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( createFlags == rhs.createFlags ) && ( buffer == rhs.buffer ) && ( offset == rhs.offset ) && ( size == rhs.size ) && ( type == rhs.type ) && ( deviceAddress == rhs.deviceAddress ); # endif } bool operator!=( MicromapCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMicromapCreateInfoEXT; const void * pNext = {}; MicromapCreateFlagsEXT createFlags = {}; Buffer buffer = {}; DeviceSize offset = {}; DeviceSize size = {}; MicromapTypeEXT type = MicromapTypeEXT::eOpacityMicromap; DeviceAddress deviceAddress = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapCreateInfoEXT; }; #endif template <> struct CppType { using Type = MicromapCreateInfoEXT; }; // wrapper struct for struct VkMicromapTriangleEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapTriangleEXT.html struct MicromapTriangleEXT { using NativeType = VkMicromapTriangleEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT : dataOffset{ dataOffset_ } , subdivisionLevel{ subdivisionLevel_ } , format{ format_ } { } VULKAN_HPP_CONSTEXPR MicromapTriangleEXT( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapTriangleEXT( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapTriangleEXT( *reinterpret_cast( &rhs ) ) { } MicromapTriangleEXT & operator=( MicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapTriangleEXT & operator=( VkMicromapTriangleEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setDataOffset( uint32_t dataOffset_ ) & VULKAN_HPP_NOEXCEPT { dataOffset = dataOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT && setDataOffset( uint32_t dataOffset_ ) && VULKAN_HPP_NOEXCEPT { dataOffset = dataOffset_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setSubdivisionLevel( uint16_t subdivisionLevel_ ) & VULKAN_HPP_NOEXCEPT { subdivisionLevel = subdivisionLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT && setSubdivisionLevel( uint16_t subdivisionLevel_ ) && VULKAN_HPP_NOEXCEPT { subdivisionLevel = subdivisionLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT & setFormat( uint16_t format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapTriangleEXT && setFormat( uint16_t format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapTriangleEXT const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkMicromapTriangleEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapTriangleEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapTriangleEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( dataOffset, subdivisionLevel, format ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MicromapTriangleEXT const & ) const = default; #else bool operator==( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( dataOffset == rhs.dataOffset ) && ( subdivisionLevel == rhs.subdivisionLevel ) && ( format == rhs.format ); # endif } bool operator!=( MicromapTriangleEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t dataOffset = {}; uint16_t subdivisionLevel = {}; uint16_t format = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapTriangleEXT; }; #endif // wrapper struct for struct VkMicromapVersionInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMicromapVersionInfoEXT.html struct MicromapVersionInfoEXT { using NativeType = VkMicromapVersionInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMicromapVersionInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pVersionData{ pVersionData_ } { } VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MicromapVersionInfoEXT( 
VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapVersionInfoEXT( *reinterpret_cast( &rhs ) ) { } MicromapVersionInfoEXT & operator=( MicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MicromapVersionInfoEXT & operator=( VkMicromapVersionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPVersionData( const uint8_t * pVersionData_ ) & VULKAN_HPP_NOEXCEPT { pVersionData = pVersionData_; return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT && setPVersionData( const uint8_t * pVersionData_ ) && VULKAN_HPP_NOEXCEPT { pVersionData = pVersionData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMicromapVersionInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapVersionInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMicromapVersionInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMicromapVersionInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pVersionData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MicromapVersionInfoEXT const & ) const = default; #else bool operator==( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # 
else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pVersionData == rhs.pVersionData ); # endif } bool operator!=( MicromapVersionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMicromapVersionInfoEXT; const void * pNext = {}; const uint8_t * pVersionData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MicromapVersionInfoEXT; }; #endif template <> struct CppType { using Type = MicromapVersionInfoEXT; }; // wrapper struct for struct VkMultiDrawIndexedInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawIndexedInfoEXT.html struct MultiDrawIndexedInfoEXT { using NativeType = VkMultiDrawIndexedInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT : firstIndex{ firstIndex_ } , indexCount{ indexCount_ } , vertexOffset{ vertexOffset_ } { } VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawIndexedInfoEXT( *reinterpret_cast( &rhs ) ) { } MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultiDrawIndexedInfoEXT & operator=( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) & VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT && setFirstIndex( uint32_t firstIndex_ ) && 
VULKAN_HPP_NOEXCEPT { firstIndex = firstIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setIndexCount( uint32_t indexCount_ ) & VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT && setIndexCount( uint32_t indexCount_ ) && VULKAN_HPP_NOEXCEPT { indexCount = indexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setVertexOffset( int32_t vertexOffset_ ) & VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT && setVertexOffset( int32_t vertexOffset_ ) && VULKAN_HPP_NOEXCEPT { vertexOffset = vertexOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMultiDrawIndexedInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiDrawIndexedInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiDrawIndexedInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultiDrawIndexedInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( firstIndex, indexCount, vertexOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultiDrawIndexedInfoEXT const & ) const = default; #else bool operator==( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( firstIndex == rhs.firstIndex ) && ( indexCount == rhs.indexCount ) && ( vertexOffset == rhs.vertexOffset ); # endif } bool operator!=( MultiDrawIndexedInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t firstIndex = {}; uint32_t indexCount = {}; int32_t vertexOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION 
template <> struct CppType { using Type = MultiDrawIndexedInfoEXT; }; #endif // wrapper struct for struct VkMultiDrawInfoEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiDrawInfoEXT.html struct MultiDrawInfoEXT { using NativeType = VkMultiDrawInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT : firstVertex{ firstVertex_ } , vertexCount{ vertexCount_ } { } VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultiDrawInfoEXT( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawInfoEXT( *reinterpret_cast( &rhs ) ) {} MultiDrawInfoEXT & operator=( MultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultiDrawInfoEXT & operator=( VkMultiDrawInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setFirstVertex( uint32_t firstVertex_ ) & VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT && setFirstVertex( uint32_t firstVertex_ ) && VULKAN_HPP_NOEXCEPT { firstVertex = firstVertex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT & setVertexCount( uint32_t vertexCount_ ) & VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiDrawInfoEXT && setVertexCount( uint32_t vertexCount_ ) && VULKAN_HPP_NOEXCEPT { vertexCount = vertexCount_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMultiDrawInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiDrawInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkMultiDrawInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultiDrawInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( firstVertex, vertexCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultiDrawInfoEXT const & ) const = default; #else bool operator==( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( firstVertex == rhs.firstVertex ) && ( vertexCount == rhs.vertexCount ); # endif } bool operator!=( MultiDrawInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t firstVertex = {}; uint32_t vertexCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MultiDrawInfoEXT; }; #endif // wrapper struct for struct VkMultisamplePropertiesEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisamplePropertiesEXT.html struct MultisamplePropertiesEXT { using NativeType = VkMultisamplePropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } { } VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultisamplePropertiesEXT( *reinterpret_cast( &rhs ) ) { } MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkMultisamplePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultisamplePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultisamplePropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxSampleLocationGridSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultisamplePropertiesEXT const & ) const = default; #else bool operator==( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); # endif } bool operator!=( MultisamplePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMultisamplePropertiesEXT; void * pNext = {}; Extent2D maxSampleLocationGridSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MultisamplePropertiesEXT; }; #endif template <> struct CppType { using Type = MultisamplePropertiesEXT; }; // wrapper struct for struct VkMultisampledRenderToSingleSampledInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultisampledRenderToSingleSampledInfoEXT.html struct MultisampledRenderToSingleSampledInfoEXT { using NativeType = VkMultisampledRenderToSingleSampledInfoEXT; static const bool allowDuplicate = 
false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( Bool32 multisampledRenderToSingleSampledEnable_ = {}, SampleCountFlagBits rasterizationSamples_ = SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ } , rasterizationSamples{ rasterizationSamples_ } { } VULKAN_HPP_CONSTEXPR MultisampledRenderToSingleSampledInfoEXT( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultisampledRenderToSingleSampledInfoEXT( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultisampledRenderToSingleSampledInfoEXT( *reinterpret_cast( &rhs ) ) { } MultisampledRenderToSingleSampledInfoEXT & operator=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultisampledRenderToSingleSampledInfoEXT & operator=( VkMultisampledRenderToSingleSampledInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setMultisampledRenderToSingleSampledEnable( Bool32 multisampledRenderToSingleSampledEnable_ ) & VULKAN_HPP_NOEXCEPT { multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT && setMultisampledRenderToSingleSampledEnable( Bool32 multisampledRenderToSingleSampledEnable_ ) && VULKAN_HPP_NOEXCEPT { multisampledRenderToSingleSampledEnable = multisampledRenderToSingleSampledEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT & setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) & VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultisampledRenderToSingleSampledInfoEXT && setRasterizationSamples( SampleCountFlagBits rasterizationSamples_ ) && VULKAN_HPP_NOEXCEPT { rasterizationSamples = rasterizationSamples_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMultisampledRenderToSingleSampledInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultisampledRenderToSingleSampledInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultisampledRenderToSingleSampledInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultisampledRenderToSingleSampledInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multisampledRenderToSingleSampledEnable, rasterizationSamples ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultisampledRenderToSingleSampledInfoEXT const & ) const = default; #else bool operator==( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampledEnable == rhs.multisampledRenderToSingleSampledEnable ) && ( rasterizationSamples == 
rhs.rasterizationSamples ); # endif } bool operator!=( MultisampledRenderToSingleSampledInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMultisampledRenderToSingleSampledInfoEXT; const void * pNext = {}; Bool32 multisampledRenderToSingleSampledEnable = {}; SampleCountFlagBits rasterizationSamples = SampleCountFlagBits::e1; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MultisampledRenderToSingleSampledInfoEXT; }; #endif template <> struct CppType { using Type = MultisampledRenderToSingleSampledInfoEXT; }; // wrapper struct for struct VkMultiviewPerViewAttributesInfoNVX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewAttributesInfoNVX.html struct MultiviewPerViewAttributesInfoNVX { using NativeType = VkMultiviewPerViewAttributesInfoNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewAttributesInfoNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( Bool32 perViewAttributes_ = {}, Bool32 perViewAttributesPositionXOnly_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , perViewAttributes{ perViewAttributes_ } , perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ } { } VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultiviewPerViewAttributesInfoNVX( VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT : MultiviewPerViewAttributesInfoNVX( *reinterpret_cast( &rhs ) ) { } MultiviewPerViewAttributesInfoNVX & operator=( MultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultiviewPerViewAttributesInfoNVX & operator=( 
VkMultiviewPerViewAttributesInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributes( Bool32 perViewAttributes_ ) & VULKAN_HPP_NOEXCEPT { perViewAttributes = perViewAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX && setPerViewAttributes( Bool32 perViewAttributes_ ) && VULKAN_HPP_NOEXCEPT { perViewAttributes = perViewAttributes_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX & setPerViewAttributesPositionXOnly( Bool32 perViewAttributesPositionXOnly_ ) & VULKAN_HPP_NOEXCEPT { perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewAttributesInfoNVX && setPerViewAttributesPositionXOnly( Bool32 perViewAttributesPositionXOnly_ ) && VULKAN_HPP_NOEXCEPT { perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiviewPerViewAttributesInfoNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultiviewPerViewAttributesInfoNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( 
sType, pNext, perViewAttributes, perViewAttributesPositionXOnly ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultiviewPerViewAttributesInfoNVX const & ) const = default; #else bool operator==( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewAttributes == rhs.perViewAttributes ) && ( perViewAttributesPositionXOnly == rhs.perViewAttributesPositionXOnly ); # endif } bool operator!=( MultiviewPerViewAttributesInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMultiviewPerViewAttributesInfoNVX; const void * pNext = {}; Bool32 perViewAttributes = {}; Bool32 perViewAttributesPositionXOnly = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MultiviewPerViewAttributesInfoNVX; }; #endif template <> struct CppType { using Type = MultiviewPerViewAttributesInfoNVX; }; // wrapper struct for struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM.html struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { using NativeType = VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t perViewRenderAreaCount_ = {}, const Rect2D * pPerViewRenderAreas_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , perViewRenderAreaCount{ perViewRenderAreaCount_ } , 
pPerViewRenderAreas{ pPerViewRenderAreas_ } { } VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( ArrayProxyNoTemporaries const & perViewRenderAreas_, const void * pNext_ = nullptr ) : pNext( pNext_ ), perViewRenderAreaCount( static_cast( perViewRenderAreas_.size() ) ), pPerViewRenderAreas( perViewRenderAreas_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & operator=( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPerViewRenderAreaCount( uint32_t perViewRenderAreaCount_ ) & VULKAN_HPP_NOEXCEPT { perViewRenderAreaCount = perViewRenderAreaCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM && setPerViewRenderAreaCount( uint32_t 
perViewRenderAreaCount_ ) && VULKAN_HPP_NOEXCEPT { perViewRenderAreaCount = perViewRenderAreaCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPPerViewRenderAreas( const Rect2D * pPerViewRenderAreas_ ) & VULKAN_HPP_NOEXCEPT { pPerViewRenderAreas = pPerViewRenderAreas_; return *this; } VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM && setPPerViewRenderAreas( const Rect2D * pPerViewRenderAreas_ ) && VULKAN_HPP_NOEXCEPT { pPerViewRenderAreas = pPerViewRenderAreas_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPerViewRenderAreas( ArrayProxyNoTemporaries const & perViewRenderAreas_ ) VULKAN_HPP_NOEXCEPT { perViewRenderAreaCount = static_cast( perViewRenderAreas_.size() ); pPerViewRenderAreas = perViewRenderAreas_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, perViewRenderAreaCount, pPerViewRenderAreas ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & ) const = default; #else bool operator==( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) 
return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewRenderAreaCount == rhs.perViewRenderAreaCount ) && ( pPerViewRenderAreas == rhs.pPerViewRenderAreas ); # endif } bool operator!=( MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; const void * pNext = {}; uint32_t perViewRenderAreaCount = {}; const Rect2D * pPerViewRenderAreas = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; }; #endif template <> struct CppType { using Type = MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; }; // wrapper struct for struct VkMutableDescriptorTypeListEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeListEXT.html struct MutableDescriptorTypeListEXT { using NativeType = VkMutableDescriptorTypeListEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, const DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT : descriptorTypeCount{ descriptorTypeCount_ } , pDescriptorTypes{ pDescriptorTypes_ } { } VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeListEXT( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MutableDescriptorTypeListEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MutableDescriptorTypeListEXT( ArrayProxyNoTemporaries const & descriptorTypes_ ) : descriptorTypeCount( static_cast( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ 
MutableDescriptorTypeListEXT & operator=( MutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MutableDescriptorTypeListEXT & operator=( VkMutableDescriptorTypeListEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorTypeCount = descriptorTypeCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT && setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorTypeCount = descriptorTypeCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT & setPDescriptorTypes( const DescriptorType * pDescriptorTypes_ ) & VULKAN_HPP_NOEXCEPT { pDescriptorTypes = pDescriptorTypes_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListEXT && setPDescriptorTypes( const DescriptorType * pDescriptorTypes_ ) && VULKAN_HPP_NOEXCEPT { pDescriptorTypes = pDescriptorTypes_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MutableDescriptorTypeListEXT & setDescriptorTypes( ArrayProxyNoTemporaries const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT { descriptorTypeCount = static_cast( descriptorTypes_.size() ); pDescriptorTypes = descriptorTypes_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMutableDescriptorTypeListEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMutableDescriptorTypeListEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMutableDescriptorTypeListEXT *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( descriptorTypeCount, pDescriptorTypes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MutableDescriptorTypeListEXT const & ) const = default; #else bool operator==( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( descriptorTypeCount == rhs.descriptorTypeCount ) && ( pDescriptorTypes == rhs.pDescriptorTypes ); # endif } bool operator!=( MutableDescriptorTypeListEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t descriptorTypeCount = {}; const DescriptorType * pDescriptorTypes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MutableDescriptorTypeListEXT; }; #endif using MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT; // wrapper struct for struct VkMutableDescriptorTypeCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkMutableDescriptorTypeCreateInfoEXT.html struct MutableDescriptorTypeCreateInfoEXT { using NativeType = VkMutableDescriptorTypeCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ } , pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ } { } VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( 
MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; MutableDescriptorTypeCreateInfoEXT( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MutableDescriptorTypeCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MutableDescriptorTypeCreateInfoEXT( ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , mutableDescriptorTypeListCount( static_cast( mutableDescriptorTypeLists_.size() ) ) , pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ MutableDescriptorTypeCreateInfoEXT & operator=( MutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ MutableDescriptorTypeCreateInfoEXT & operator=( VkMutableDescriptorTypeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) & VULKAN_HPP_NOEXCEPT { mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT && setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) && VULKAN_HPP_NOEXCEPT { mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT & setPMutableDescriptorTypeLists( const 
MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) & VULKAN_HPP_NOEXCEPT { pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; return *this; } VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoEXT && setPMutableDescriptorTypeLists( const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ ) && VULKAN_HPP_NOEXCEPT { pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MutableDescriptorTypeCreateInfoEXT & setMutableDescriptorTypeLists( ArrayProxyNoTemporaries const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT { mutableDescriptorTypeListCount = static_cast( mutableDescriptorTypeLists_.size() ); pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkMutableDescriptorTypeCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMutableDescriptorTypeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkMutableDescriptorTypeCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkMutableDescriptorTypeCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mutableDescriptorTypeListCount, pMutableDescriptorTypeLists ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( MutableDescriptorTypeCreateInfoEXT const & ) const = default; #else bool operator==( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount ) && ( 
pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists ); # endif } bool operator!=( MutableDescriptorTypeCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoEXT; const void * pNext = {}; uint32_t mutableDescriptorTypeListCount = {}; const MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = MutableDescriptorTypeCreateInfoEXT; }; #endif template <> struct CppType { using Type = MutableDescriptorTypeCreateInfoEXT; }; using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkNativeBufferFormatPropertiesOHOS, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferFormatPropertiesOHOS.html struct NativeBufferFormatPropertiesOHOS { using NativeType = VkNativeBufferFormatPropertiesOHOS; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eNativeBufferFormatPropertiesOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR NativeBufferFormatPropertiesOHOS( Format format_ = Format::eUndefined, uint64_t externalFormat_ = {}, FormatFeatureFlags formatFeatures_ = {}, ComponentMapping samplerYcbcrConversionComponents_ = {}, SamplerYcbcrModelConversion suggestedYcbcrModel_ = SamplerYcbcrModelConversion::eRgbIdentity, SamplerYcbcrRange suggestedYcbcrRange_ = SamplerYcbcrRange::eItuFull, ChromaLocation suggestedXChromaOffset_ = ChromaLocation::eCositedEven, ChromaLocation suggestedYChromaOffset_ = ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , format{ format_ } , externalFormat{ externalFormat_ } , formatFeatures{ formatFeatures_ } , samplerYcbcrConversionComponents{ 
samplerYcbcrConversionComponents_ } , suggestedYcbcrModel{ suggestedYcbcrModel_ } , suggestedYcbcrRange{ suggestedYcbcrRange_ } , suggestedXChromaOffset{ suggestedXChromaOffset_ } , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } VULKAN_HPP_CONSTEXPR NativeBufferFormatPropertiesOHOS( NativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; NativeBufferFormatPropertiesOHOS( VkNativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : NativeBufferFormatPropertiesOHOS( *reinterpret_cast( &rhs ) ) { } NativeBufferFormatPropertiesOHOS & operator=( NativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ NativeBufferFormatPropertiesOHOS & operator=( VkNativeBufferFormatPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkNativeBufferFormatPropertiesOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferFormatPropertiesOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferFormatPropertiesOHOS const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkNativeBufferFormatPropertiesOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format, externalFormat, formatFeatures, samplerYcbcrConversionComponents, suggestedYcbcrModel, suggestedYcbcrRange, suggestedXChromaOffset, suggestedYChromaOffset ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( NativeBufferFormatPropertiesOHOS const & ) const = default; # else bool operator==( NativeBufferFormatPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == 
rhs.format ) && ( externalFormat == rhs.externalFormat ) && ( formatFeatures == rhs.formatFeatures ) && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents ) && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel ) && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange ) && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset ) && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset ); # endif } bool operator!=( NativeBufferFormatPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eNativeBufferFormatPropertiesOHOS; void * pNext = {}; Format format = Format::eUndefined; uint64_t externalFormat = {}; FormatFeatureFlags formatFeatures = {}; ComponentMapping samplerYcbcrConversionComponents = {}; SamplerYcbcrModelConversion suggestedYcbcrModel = SamplerYcbcrModelConversion::eRgbIdentity; SamplerYcbcrRange suggestedYcbcrRange = SamplerYcbcrRange::eItuFull; ChromaLocation suggestedXChromaOffset = ChromaLocation::eCositedEven; ChromaLocation suggestedYChromaOffset = ChromaLocation::eCositedEven; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = NativeBufferFormatPropertiesOHOS; }; # endif template <> struct CppType { using Type = NativeBufferFormatPropertiesOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkNativeBufferPropertiesOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferPropertiesOHOS.html struct NativeBufferPropertiesOHOS { using NativeType = VkNativeBufferPropertiesOHOS; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eNativeBufferPropertiesOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR NativeBufferPropertiesOHOS( DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , allocationSize{ allocationSize_ } , memoryTypeBits{ memoryTypeBits_ } { } VULKAN_HPP_CONSTEXPR NativeBufferPropertiesOHOS( NativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; NativeBufferPropertiesOHOS( VkNativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : NativeBufferPropertiesOHOS( *reinterpret_cast( &rhs ) ) { } NativeBufferPropertiesOHOS & operator=( NativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ NativeBufferPropertiesOHOS & operator=( VkNativeBufferPropertiesOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkNativeBufferPropertiesOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferPropertiesOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferPropertiesOHOS const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkNativeBufferPropertiesOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, allocationSize, memoryTypeBits ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( NativeBufferPropertiesOHOS const & ) const = default; # else bool operator==( NativeBufferPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allocationSize == rhs.allocationSize ) && ( memoryTypeBits == rhs.memoryTypeBits ); # endif } bool operator!=( NativeBufferPropertiesOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eNativeBufferPropertiesOHOS; void * pNext = {}; DeviceSize allocationSize = {}; 
uint32_t memoryTypeBits = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = NativeBufferPropertiesOHOS; }; # endif template <> struct CppType { using Type = NativeBufferPropertiesOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ #if defined( VK_USE_PLATFORM_OHOS ) // wrapper struct for struct VkNativeBufferUsageOHOS, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkNativeBufferUsageOHOS.html struct NativeBufferUsageOHOS { using NativeType = VkNativeBufferUsageOHOS; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eNativeBufferUsageOHOS; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR NativeBufferUsageOHOS( uint64_t OHOSNativeBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , OHOSNativeBufferUsage{ OHOSNativeBufferUsage_ } { } VULKAN_HPP_CONSTEXPR NativeBufferUsageOHOS( NativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; NativeBufferUsageOHOS( VkNativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT : NativeBufferUsageOHOS( *reinterpret_cast( &rhs ) ) { } NativeBufferUsageOHOS & operator=( NativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ NativeBufferUsageOHOS & operator=( VkNativeBufferUsageOHOS const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkNativeBufferUsageOHOS const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferUsageOHOS &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkNativeBufferUsageOHOS const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkNativeBufferUsageOHOS *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, 
pNext, OHOSNativeBufferUsage ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( NativeBufferUsageOHOS const & ) const = default; # else bool operator==( NativeBufferUsageOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( OHOSNativeBufferUsage == rhs.OHOSNativeBufferUsage ); # endif } bool operator!=( NativeBufferUsageOHOS const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::eNativeBufferUsageOHOS; void * pNext = {}; uint64_t OHOSNativeBufferUsage = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = NativeBufferUsageOHOS; }; # endif template <> struct CppType { using Type = NativeBufferUsageOHOS; }; #endif /*VK_USE_PLATFORM_OHOS*/ // wrapper struct for struct VkOpaqueCaptureDataCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDataCreateInfoEXT.html struct OpaqueCaptureDataCreateInfoEXT { using NativeType = VkOpaqueCaptureDataCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDataCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpaqueCaptureDataCreateInfoEXT( const HostAddressRangeConstEXT * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pData{ pData_ } { } VULKAN_HPP_CONSTEXPR OpaqueCaptureDataCreateInfoEXT( OpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpaqueCaptureDataCreateInfoEXT( VkOpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : OpaqueCaptureDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } OpaqueCaptureDataCreateInfoEXT & operator=( OpaqueCaptureDataCreateInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpaqueCaptureDataCreateInfoEXT & operator=( VkOpaqueCaptureDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT & setPData( const HostAddressRangeConstEXT * pData_ ) & VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDataCreateInfoEXT && setPData( const HostAddressRangeConstEXT * pData_ ) && VULKAN_HPP_NOEXCEPT { pData = pData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpaqueCaptureDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpaqueCaptureDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpaqueCaptureDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpaqueCaptureDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpaqueCaptureDataCreateInfoEXT const & ) const = default; #else bool operator==( OpaqueCaptureDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pData == rhs.pData ); # endif } bool operator!=( 
OpaqueCaptureDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpaqueCaptureDataCreateInfoEXT; const void * pNext = {}; const HostAddressRangeConstEXT * pData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpaqueCaptureDataCreateInfoEXT; }; #endif template <> struct CppType { using Type = OpaqueCaptureDataCreateInfoEXT; }; // wrapper struct for struct VkOpaqueCaptureDescriptorDataCreateInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html struct OpaqueCaptureDescriptorDataCreateInfoEXT { using NativeType = VkOpaqueCaptureDescriptorDataCreateInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ } { } VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpaqueCaptureDescriptorDataCreateInfoEXT( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : OpaqueCaptureDescriptorDataCreateInfoEXT( *reinterpret_cast( &rhs ) ) { } OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpaqueCaptureDescriptorDataCreateInfoEXT & operator=( VkOpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT & setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) & VULKAN_HPP_NOEXCEPT { opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpaqueCaptureDescriptorDataCreateInfoEXT && setOpaqueCaptureDescriptorData( const void * opaqueCaptureDescriptorData_ ) && VULKAN_HPP_NOEXCEPT { opaqueCaptureDescriptorData = opaqueCaptureDescriptorData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpaqueCaptureDescriptorDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpaqueCaptureDescriptorDataCreateInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpaqueCaptureDescriptorDataCreateInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, opaqueCaptureDescriptorData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpaqueCaptureDescriptorDataCreateInfoEXT const & ) const = default; #else bool operator==( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opaqueCaptureDescriptorData == 
rhs.opaqueCaptureDescriptorData ); # endif } bool operator!=( OpaqueCaptureDescriptorDataCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpaqueCaptureDescriptorDataCreateInfoEXT; const void * pNext = {}; const void * opaqueCaptureDescriptorData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpaqueCaptureDescriptorDataCreateInfoEXT; }; #endif template <> struct CppType { using Type = OpaqueCaptureDescriptorDataCreateInfoEXT; }; // wrapper struct for struct VkOpticalFlowExecuteInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowExecuteInfoNV.html struct OpticalFlowExecuteInfoNV { using NativeType = VkOpticalFlowExecuteInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowExecuteInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteFlagsNV flags_ = {}, uint32_t regionCount_ = {}, const Rect2D * pRegions_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , regionCount{ regionCount_ } , pRegions{ pRegions_ } { } VULKAN_HPP_CONSTEXPR OpticalFlowExecuteInfoNV( OpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpticalFlowExecuteInfoNV( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : OpticalFlowExecuteInfoNV( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) OpticalFlowExecuteInfoNV( OpticalFlowExecuteFlagsNV flags_, ArrayProxyNoTemporaries const & regions_, void * pNext_ = nullptr ) : pNext( pNext_ ), flags( flags_ ), regionCount( static_cast( regions_.size() ) ), pRegions( regions_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ OpticalFlowExecuteInfoNV & operator=( OpticalFlowExecuteInfoNV const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpticalFlowExecuteInfoNV & operator=( VkOpticalFlowExecuteInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setFlags( OpticalFlowExecuteFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV && setFlags( OpticalFlowExecuteFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setRegionCount( uint32_t regionCount_ ) & VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV && setRegionCount( uint32_t regionCount_ ) && VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV & setPRegions( const Rect2D * pRegions_ ) & VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowExecuteInfoNV && setPRegions( const Rect2D * pRegions_ ) && VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) OpticalFlowExecuteInfoNV & setRegions( ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpticalFlowExecuteInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkOpticalFlowExecuteInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowExecuteInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpticalFlowExecuteInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, regionCount, pRegions ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpticalFlowExecuteInfoNV const & ) const = default; #else bool operator==( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( regionCount == rhs.regionCount ) && ( pRegions == rhs.pRegions ); # endif } bool operator!=( OpticalFlowExecuteInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpticalFlowExecuteInfoNV; void * pNext = {}; OpticalFlowExecuteFlagsNV flags = {}; uint32_t regionCount = {}; const Rect2D * pRegions = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpticalFlowExecuteInfoNV; }; #endif template <> struct CppType { using Type = OpticalFlowExecuteInfoNV; }; // wrapper struct for struct VkOpticalFlowImageFormatInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatInfoNV.html struct OpticalFlowImageFormatInfoNV { using NativeType = VkOpticalFlowImageFormatInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowUsageFlagsNV usage_ = {}, const void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , usage{ usage_ } { } VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpticalFlowImageFormatInfoNV( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : OpticalFlowImageFormatInfoNV( *reinterpret_cast( &rhs ) ) { } OpticalFlowImageFormatInfoNV & operator=( OpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpticalFlowImageFormatInfoNV & operator=( VkOpticalFlowImageFormatInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV & setUsage( OpticalFlowUsageFlagsNV usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowImageFormatInfoNV && setUsage( OpticalFlowUsageFlagsNV usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowImageFormatInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowImageFormatInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpticalFlowImageFormatInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, usage ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpticalFlowImageFormatInfoNV const & ) const = default; #else bool operator==( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( usage == rhs.usage ); # endif } bool operator!=( OpticalFlowImageFormatInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpticalFlowImageFormatInfoNV; const void * pNext = {}; OpticalFlowUsageFlagsNV usage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpticalFlowImageFormatInfoNV; }; #endif template <> struct CppType { using Type = OpticalFlowImageFormatInfoNV; }; // wrapper struct for struct VkOpticalFlowImageFormatPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowImageFormatPropertiesNV.html struct OpticalFlowImageFormatPropertiesNV { using NativeType = VkOpticalFlowImageFormatPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowImageFormatPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( Format format_ = Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , format{ format_ } { } VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpticalFlowImageFormatPropertiesNV( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : OpticalFlowImageFormatPropertiesNV( *reinterpret_cast( &rhs ) ) { } OpticalFlowImageFormatPropertiesNV & operator=( OpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ OpticalFlowImageFormatPropertiesNV & operator=( VkOpticalFlowImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkOpticalFlowImageFormatPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowImageFormatPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpticalFlowImageFormatPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpticalFlowImageFormatPropertiesNV const & ) const = default; #else bool operator==( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ); # endif } bool operator!=( OpticalFlowImageFormatPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpticalFlowImageFormatPropertiesNV; void * pNext = {}; Format format = Format::eUndefined; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpticalFlowImageFormatPropertiesNV; }; #endif template <> struct CppType { using Type = OpticalFlowImageFormatPropertiesNV; }; // wrapper struct for struct VkOpticalFlowSessionCreateInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreateInfoNV.html struct OpticalFlowSessionCreateInfoNV { using NativeType = VkOpticalFlowSessionCreateInfoNV; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreateInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( uint32_t width_ = {}, uint32_t height_ = {}, Format imageFormat_ = Format::eUndefined, Format flowVectorFormat_ = Format::eUndefined, Format costFormat_ = Format::eUndefined, OpticalFlowGridSizeFlagsNV outputGridSize_ = {}, OpticalFlowGridSizeFlagsNV hintGridSize_ = {}, OpticalFlowPerformanceLevelNV performanceLevel_ = OpticalFlowPerformanceLevelNV::eUnknown, OpticalFlowSessionCreateFlagsNV flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , width{ width_ } , height{ height_ } , imageFormat{ imageFormat_ } , flowVectorFormat{ flowVectorFormat_ } , costFormat{ costFormat_ } , outputGridSize{ outputGridSize_ } , hintGridSize{ hintGridSize_ } , performanceLevel{ performanceLevel_ } , flags{ flags_ } { } VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreateInfoNV( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpticalFlowSessionCreateInfoNV( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : OpticalFlowSessionCreateInfoNV( *reinterpret_cast( &rhs ) ) { } OpticalFlowSessionCreateInfoNV & operator=( OpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpticalFlowSessionCreateInfoNV & operator=( VkOpticalFlowSessionCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setWidth( uint32_t width_ ) & VULKAN_HPP_NOEXCEPT { width = width_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setWidth( uint32_t width_ ) && VULKAN_HPP_NOEXCEPT { width = width_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHeight( uint32_t height_ ) & VULKAN_HPP_NOEXCEPT { height = height_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setHeight( uint32_t height_ ) && VULKAN_HPP_NOEXCEPT { height = height_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setImageFormat( Format imageFormat_ ) & VULKAN_HPP_NOEXCEPT { imageFormat = imageFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setImageFormat( Format imageFormat_ ) && VULKAN_HPP_NOEXCEPT { imageFormat = imageFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlowVectorFormat( Format flowVectorFormat_ ) & VULKAN_HPP_NOEXCEPT { flowVectorFormat = flowVectorFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setFlowVectorFormat( Format flowVectorFormat_ ) && VULKAN_HPP_NOEXCEPT { flowVectorFormat = flowVectorFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setCostFormat( Format costFormat_ ) & VULKAN_HPP_NOEXCEPT { costFormat = costFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setCostFormat( Format costFormat_ ) && VULKAN_HPP_NOEXCEPT { costFormat = costFormat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setOutputGridSize( OpticalFlowGridSizeFlagsNV outputGridSize_ ) & VULKAN_HPP_NOEXCEPT { outputGridSize = outputGridSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setOutputGridSize( OpticalFlowGridSizeFlagsNV outputGridSize_ ) && VULKAN_HPP_NOEXCEPT { 
outputGridSize = outputGridSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setHintGridSize( OpticalFlowGridSizeFlagsNV hintGridSize_ ) & VULKAN_HPP_NOEXCEPT { hintGridSize = hintGridSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setHintGridSize( OpticalFlowGridSizeFlagsNV hintGridSize_ ) && VULKAN_HPP_NOEXCEPT { hintGridSize = hintGridSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setPerformanceLevel( OpticalFlowPerformanceLevelNV performanceLevel_ ) & VULKAN_HPP_NOEXCEPT { performanceLevel = performanceLevel_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setPerformanceLevel( OpticalFlowPerformanceLevelNV performanceLevel_ ) && VULKAN_HPP_NOEXCEPT { performanceLevel = performanceLevel_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV & setFlags( OpticalFlowSessionCreateFlagsNV flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreateInfoNV && setFlags( OpticalFlowSessionCreateFlagsNV flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpticalFlowSessionCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowSessionCreateInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowSessionCreateInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpticalFlowSessionCreateInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, width, height, imageFormat, flowVectorFormat, costFormat, outputGridSize, hintGridSize, performanceLevel, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( OpticalFlowSessionCreateInfoNV const & ) const = default; #else bool operator==( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( width == rhs.width ) && ( height == rhs.height ) && ( imageFormat == rhs.imageFormat ) && ( flowVectorFormat == rhs.flowVectorFormat ) && ( costFormat == rhs.costFormat ) && ( outputGridSize == rhs.outputGridSize ) && ( hintGridSize == rhs.hintGridSize ) && ( performanceLevel == rhs.performanceLevel ) && ( flags == rhs.flags ); # endif } bool operator!=( OpticalFlowSessionCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpticalFlowSessionCreateInfoNV; void * pNext = {}; uint32_t width = {}; uint32_t height = {}; Format imageFormat = Format::eUndefined; Format flowVectorFormat = Format::eUndefined; Format costFormat = Format::eUndefined; OpticalFlowGridSizeFlagsNV outputGridSize = {}; OpticalFlowGridSizeFlagsNV hintGridSize = {}; OpticalFlowPerformanceLevelNV performanceLevel = OpticalFlowPerformanceLevelNV::eUnknown; OpticalFlowSessionCreateFlagsNV flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpticalFlowSessionCreateInfoNV; }; #endif template <> struct CppType { using Type = OpticalFlowSessionCreateInfoNV; }; // wrapper struct for struct VkOpticalFlowSessionCreatePrivateDataInfoNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkOpticalFlowSessionCreatePrivateDataInfoNV.html struct OpticalFlowSessionCreatePrivateDataInfoNV { using NativeType = VkOpticalFlowSessionCreatePrivateDataInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( uint32_t id_ = {}, uint32_t size_ = {}, const void * pPrivateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , id{ id_ } , size{ size_ } , pPrivateData{ pPrivateData_ } { } VULKAN_HPP_CONSTEXPR OpticalFlowSessionCreatePrivateDataInfoNV( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OpticalFlowSessionCreatePrivateDataInfoNV( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : OpticalFlowSessionCreatePrivateDataInfoNV( *reinterpret_cast( &rhs ) ) { } OpticalFlowSessionCreatePrivateDataInfoNV & operator=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OpticalFlowSessionCreatePrivateDataInfoNV & operator=( VkOpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setId( uint32_t id_ ) & VULKAN_HPP_NOEXCEPT { id = id_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV && setId( uint32_t id_ ) && VULKAN_HPP_NOEXCEPT { id = id_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setSize( uint32_t size_ ) & VULKAN_HPP_NOEXCEPT { size = size_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV && setSize( uint32_t size_ ) && VULKAN_HPP_NOEXCEPT { size = size_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV & setPPrivateData( const void * pPrivateData_ ) & VULKAN_HPP_NOEXCEPT { pPrivateData = pPrivateData_; return *this; } VULKAN_HPP_CONSTEXPR_14 OpticalFlowSessionCreatePrivateDataInfoNV && setPPrivateData( const void * pPrivateData_ ) && VULKAN_HPP_NOEXCEPT { pPrivateData = pPrivateData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOpticalFlowSessionCreatePrivateDataInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowSessionCreatePrivateDataInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOpticalFlowSessionCreatePrivateDataInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOpticalFlowSessionCreatePrivateDataInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, id, size, pPrivateData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OpticalFlowSessionCreatePrivateDataInfoNV const & ) const = default; #else bool operator==( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( id == rhs.id ) && ( size == rhs.size ) && ( pPrivateData == rhs.pPrivateData ); # endif } bool operator!=( OpticalFlowSessionCreatePrivateDataInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; void * pNext = {}; uint32_t id = {}; uint32_t size = {}; const void * pPrivateData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OpticalFlowSessionCreatePrivateDataInfoNV; }; #endif 
template <> struct CppType { using Type = OpticalFlowSessionCreatePrivateDataInfoNV; }; // wrapper struct for struct VkOutOfBandQueueTypeInfoNV, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkOutOfBandQueueTypeInfoNV.html struct OutOfBandQueueTypeInfoNV { using NativeType = VkOutOfBandQueueTypeInfoNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eOutOfBandQueueTypeInfoNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeNV queueType_ = OutOfBandQueueTypeNV::eRender, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , queueType{ queueType_ } { } VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; OutOfBandQueueTypeInfoNV( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT : OutOfBandQueueTypeInfoNV( *reinterpret_cast( &rhs ) ) { } OutOfBandQueueTypeInfoNV & operator=( OutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ OutOfBandQueueTypeInfoNV & operator=( VkOutOfBandQueueTypeInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV & setQueueType( OutOfBandQueueTypeNV queueType_ ) & VULKAN_HPP_NOEXCEPT { queueType = queueType_; return *this; } VULKAN_HPP_CONSTEXPR_14 OutOfBandQueueTypeInfoNV && setQueueType( OutOfBandQueueTypeNV queueType_ ) && VULKAN_HPP_NOEXCEPT { 
queueType = queueType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkOutOfBandQueueTypeInfoNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOutOfBandQueueTypeInfoNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkOutOfBandQueueTypeInfoNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkOutOfBandQueueTypeInfoNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, queueType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( OutOfBandQueueTypeInfoNV const & ) const = default; #else bool operator==( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueType == rhs.queueType ); # endif } bool operator!=( OutOfBandQueueTypeInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; const void * pNext = {}; OutOfBandQueueTypeNV queueType = OutOfBandQueueTypeNV::eRender; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = OutOfBandQueueTypeInfoNV; }; #endif template <> struct CppType { using Type = OutOfBandQueueTypeInfoNV; }; // wrapper struct for struct VkPartitionedAccelerationStructureFlagsNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureFlagsNV.html struct PartitionedAccelerationStructureFlagsNV { using NativeType = VkPartitionedAccelerationStructureFlagsNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePartitionedAccelerationStructureFlagsNV; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( Bool32 enablePartitionTranslation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , enablePartitionTranslation{ enablePartitionTranslation_ } { } VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureFlagsNV( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PartitionedAccelerationStructureFlagsNV( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT : PartitionedAccelerationStructureFlagsNV( *reinterpret_cast( &rhs ) ) { } PartitionedAccelerationStructureFlagsNV & operator=( PartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PartitionedAccelerationStructureFlagsNV & operator=( VkPartitionedAccelerationStructureFlagsNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV & setEnablePartitionTranslation( Bool32 enablePartitionTranslation_ ) & VULKAN_HPP_NOEXCEPT { enablePartitionTranslation = enablePartitionTranslation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureFlagsNV && setEnablePartitionTranslation( Bool32 enablePartitionTranslation_ ) && VULKAN_HPP_NOEXCEPT { enablePartitionTranslation = enablePartitionTranslation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPartitionedAccelerationStructureFlagsNV const &() const VULKAN_HPP_NOEXCEPT { 
return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureFlagsNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureFlagsNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureFlagsNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, enablePartitionTranslation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PartitionedAccelerationStructureFlagsNV const & ) const = default; #else bool operator==( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( enablePartitionTranslation == rhs.enablePartitionTranslation ); # endif } bool operator!=( PartitionedAccelerationStructureFlagsNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePartitionedAccelerationStructureFlagsNV; void * pNext = {}; Bool32 enablePartitionTranslation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PartitionedAccelerationStructureFlagsNV; }; #endif template <> struct CppType { using Type = PartitionedAccelerationStructureFlagsNV; }; // wrapper struct for struct VkPartitionedAccelerationStructureUpdateInstanceDataNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureUpdateInstanceDataNV.html struct PartitionedAccelerationStructureUpdateInstanceDataNV { using NativeType = VkPartitionedAccelerationStructureUpdateInstanceDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PartitionedAccelerationStructureUpdateInstanceDataNV( uint32_t instanceIndex_ = {}, uint32_t instanceContributionToHitGroupIndex_ = {}, DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : instanceIndex{ instanceIndex_ } , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } , accelerationStructure{ accelerationStructure_ } { } VULKAN_HPP_CONSTEXPR PartitionedAccelerationStructureUpdateInstanceDataNV( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PartitionedAccelerationStructureUpdateInstanceDataNV( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : PartitionedAccelerationStructureUpdateInstanceDataNV( *reinterpret_cast( &rhs ) ) { } PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PartitionedAccelerationStructureUpdateInstanceDataNV & operator=( VkPartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceIndex = instanceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV && setInstanceIndex( uint32_t instanceIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceIndex = instanceIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PartitionedAccelerationStructureUpdateInstanceDataNV && setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV & setAccelerationStructure( DeviceAddress accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureUpdateInstanceDataNV && setAccelerationStructure( DeviceAddress accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureUpdateInstanceDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureUpdateInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureUpdateInstanceDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( instanceIndex, instanceContributionToHitGroupIndex, accelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PartitionedAccelerationStructureUpdateInstanceDataNV const & ) const = default; #else bool operator==( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( instanceIndex == rhs.instanceIndex ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && ( 
accelerationStructure == rhs.accelerationStructure ); # endif } bool operator!=( PartitionedAccelerationStructureUpdateInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t instanceIndex = {}; uint32_t instanceContributionToHitGroupIndex = {}; DeviceAddress accelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PartitionedAccelerationStructureUpdateInstanceDataNV; }; #endif // wrapper struct for struct VkPartitionedAccelerationStructureWriteInstanceDataNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWriteInstanceDataNV.html struct PartitionedAccelerationStructureWriteInstanceDataNV { using NativeType = VkPartitionedAccelerationStructureWriteInstanceDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV( TransformMatrixKHR transform_ = {}, std::array const & explicitAABB_ = {}, uint32_t instanceID_ = {}, uint32_t instanceMask_ = {}, uint32_t instanceContributionToHitGroupIndex_ = {}, PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ = {}, uint32_t instanceIndex_ = {}, uint32_t partitionIndex_ = {}, DeviceAddress accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT : transform{ transform_ } , explicitAABB{ explicitAABB_ } , instanceID{ instanceID_ } , instanceMask{ instanceMask_ } , instanceContributionToHitGroupIndex{ instanceContributionToHitGroupIndex_ } , instanceFlags{ instanceFlags_ } , instanceIndex{ instanceIndex_ } , partitionIndex{ partitionIndex_ } , accelerationStructure{ accelerationStructure_ } { } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PartitionedAccelerationStructureWriteInstanceDataNV( 
VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : PartitionedAccelerationStructureWriteInstanceDataNV( *reinterpret_cast( &rhs ) ) { } PartitionedAccelerationStructureWriteInstanceDataNV & operator=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PartitionedAccelerationStructureWriteInstanceDataNV & operator=( VkPartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setTransform( TransformMatrixKHR const & transform_ ) & VULKAN_HPP_NOEXCEPT { transform = transform_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setTransform( TransformMatrixKHR const & transform_ ) && VULKAN_HPP_NOEXCEPT { transform = transform_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setExplicitAABB( std::array explicitAABB_ ) & VULKAN_HPP_NOEXCEPT { explicitAABB = explicitAABB_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setExplicitAABB( std::array explicitAABB_ ) && VULKAN_HPP_NOEXCEPT { explicitAABB = explicitAABB_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceID( uint32_t instanceID_ ) & VULKAN_HPP_NOEXCEPT { instanceID = instanceID_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setInstanceID( uint32_t instanceID_ ) && VULKAN_HPP_NOEXCEPT { instanceID = instanceID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceMask( uint32_t instanceMask_ ) & VULKAN_HPP_NOEXCEPT { 
instanceMask = instanceMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setInstanceMask( uint32_t instanceMask_ ) && VULKAN_HPP_NOEXCEPT { instanceMask = instanceMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setInstanceContributionToHitGroupIndex( uint32_t instanceContributionToHitGroupIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceContributionToHitGroupIndex = instanceContributionToHitGroupIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceFlags( PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) & VULKAN_HPP_NOEXCEPT { instanceFlags = instanceFlags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setInstanceFlags( PartitionedAccelerationStructureInstanceFlagsNV instanceFlags_ ) && VULKAN_HPP_NOEXCEPT { instanceFlags = instanceFlags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setInstanceIndex( uint32_t instanceIndex_ ) & VULKAN_HPP_NOEXCEPT { instanceIndex = instanceIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setInstanceIndex( uint32_t instanceIndex_ ) && VULKAN_HPP_NOEXCEPT { instanceIndex = instanceIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setPartitionIndex( uint32_t partitionIndex_ ) & VULKAN_HPP_NOEXCEPT { partitionIndex = partitionIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && 
setPartitionIndex( uint32_t partitionIndex_ ) && VULKAN_HPP_NOEXCEPT { partitionIndex = partitionIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV & setAccelerationStructure( DeviceAddress accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWriteInstanceDataNV && setAccelerationStructure( DeviceAddress accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPartitionedAccelerationStructureWriteInstanceDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWriteInstanceDataNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWriteInstanceDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWriteInstanceDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, uint32_t const &, uint32_t const &, PartitionedAccelerationStructureInstanceFlagsNV const &, uint32_t const &, uint32_t const &, DeviceAddress const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( transform, explicitAABB, instanceID, instanceMask, instanceContributionToHitGroupIndex, instanceFlags, instanceIndex, partitionIndex, accelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PartitionedAccelerationStructureWriteInstanceDataNV const & ) const = default; #else bool operator==( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( transform == 
rhs.transform ) && ( explicitAABB == rhs.explicitAABB ) && ( instanceID == rhs.instanceID ) && ( instanceMask == rhs.instanceMask ) && ( instanceContributionToHitGroupIndex == rhs.instanceContributionToHitGroupIndex ) && ( instanceFlags == rhs.instanceFlags ) && ( instanceIndex == rhs.instanceIndex ) && ( partitionIndex == rhs.partitionIndex ) && ( accelerationStructure == rhs.accelerationStructure ); # endif } bool operator!=( PartitionedAccelerationStructureWriteInstanceDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: TransformMatrixKHR transform = {}; ArrayWrapper1D explicitAABB = {}; uint32_t instanceID = {}; uint32_t instanceMask = {}; uint32_t instanceContributionToHitGroupIndex = {}; PartitionedAccelerationStructureInstanceFlagsNV instanceFlags = {}; uint32_t instanceIndex = {}; uint32_t partitionIndex = {}; DeviceAddress accelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PartitionedAccelerationStructureWriteInstanceDataNV; }; #endif // wrapper struct for struct VkPartitionedAccelerationStructureWritePartitionTranslationDataNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPartitionedAccelerationStructureWritePartitionTranslationDataNV.html struct PartitionedAccelerationStructureWritePartitionTranslationDataNV { using NativeType = VkPartitionedAccelerationStructureWritePartitionTranslationDataNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV( uint32_t partitionIndex_ = {}, std::array const & partitionTranslation_ = {} ) VULKAN_HPP_NOEXCEPT : partitionIndex{ partitionIndex_ } , partitionTranslation{ partitionTranslation_ } { } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; PartitionedAccelerationStructureWritePartitionTranslationDataNV( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT : PartitionedAccelerationStructureWritePartitionTranslationDataNV( *reinterpret_cast( &rhs ) ) { } PartitionedAccelerationStructureWritePartitionTranslationDataNV & operator=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PartitionedAccelerationStructureWritePartitionTranslationDataNV & operator=( VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionIndex( uint32_t partitionIndex_ ) & VULKAN_HPP_NOEXCEPT { partitionIndex = partitionIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV && setPartitionIndex( uint32_t partitionIndex_ ) && VULKAN_HPP_NOEXCEPT { partitionIndex = partitionIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV & setPartitionTranslation( std::array partitionTranslation_ ) & VULKAN_HPP_NOEXCEPT { partitionTranslation = partitionTranslation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PartitionedAccelerationStructureWritePartitionTranslationDataNV && setPartitionTranslation( std::array partitionTranslation_ ) && VULKAN_HPP_NOEXCEPT { partitionTranslation = partitionTranslation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPartitionedAccelerationStructureWritePartitionTranslationDataNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( partitionIndex, partitionTranslation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & ) const = default; #else bool operator==( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( partitionIndex == rhs.partitionIndex ) && ( partitionTranslation == rhs.partitionTranslation ); # endif } bool operator!=( PartitionedAccelerationStructureWritePartitionTranslationDataNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t partitionIndex = {}; ArrayWrapper1D partitionTranslation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PartitionedAccelerationStructureWritePartitionTranslationDataNV; }; #endif // wrapper struct for struct VkPresentStageTimeEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPresentStageTimeEXT.html struct PresentStageTimeEXT { using NativeType = VkPresentStageTimeEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PresentStageTimeEXT( PresentStageFlagsEXT stage_ = {}, uint64_t time_ = {} ) VULKAN_HPP_NOEXCEPT : stage{ stage_ } , time{ time_ } { } VULKAN_HPP_CONSTEXPR PresentStageTimeEXT( PresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PresentStageTimeEXT( VkPresentStageTimeEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT : PresentStageTimeEXT( *reinterpret_cast( &rhs ) ) { } PresentStageTimeEXT & operator=( PresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PresentStageTimeEXT & operator=( VkPresentStageTimeEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPresentStageTimeEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPresentStageTimeEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPresentStageTimeEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPresentStageTimeEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( stage, time ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PresentStageTimeEXT const & ) const = default; #else bool operator==( PresentStageTimeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( stage == rhs.stage ) && ( time == rhs.time ); # endif } bool operator!=( PresentStageTimeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: PresentStageFlagsEXT stage = {}; uint64_t time = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PresentStageTimeEXT; }; #endif // wrapper struct for struct VkPastPresentationTimingEXT, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingEXT.html struct PastPresentationTimingEXT { using NativeType = VkPastPresentationTimingEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePastPresentationTimingEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PastPresentationTimingEXT( uint64_t presentId_ = {}, uint64_t targetTime_ = {}, uint32_t presentStageCount_ = {}, PresentStageTimeEXT * pPresentStages_ = {}, TimeDomainKHR timeDomain_ = TimeDomainKHR::eDevice, uint64_t timeDomainId_ = {}, Bool32 reportComplete_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentId{ presentId_ } , targetTime{ targetTime_ } , presentStageCount{ presentStageCount_ } , pPresentStages{ pPresentStages_ } , timeDomain{ timeDomain_ } , timeDomainId{ timeDomainId_ } , reportComplete{ reportComplete_ } { } VULKAN_HPP_CONSTEXPR PastPresentationTimingEXT( PastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingEXT( VkPastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PastPresentationTimingEXT( *reinterpret_cast( &rhs ) ) { } PastPresentationTimingEXT & operator=( PastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PastPresentationTimingEXT & operator=( VkPastPresentationTimingEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPastPresentationTimingEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPastPresentationTimingEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentId, targetTime, presentStageCount, pPresentStages, timeDomain, timeDomainId, reportComplete ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PastPresentationTimingEXT const & ) const = default; #else bool operator==( 
PastPresentationTimingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ) && ( targetTime == rhs.targetTime ) && ( presentStageCount == rhs.presentStageCount ) && ( pPresentStages == rhs.pPresentStages ) && ( timeDomain == rhs.timeDomain ) && ( timeDomainId == rhs.timeDomainId ) && ( reportComplete == rhs.reportComplete ); # endif } bool operator!=( PastPresentationTimingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePastPresentationTimingEXT; void * pNext = {}; uint64_t presentId = {}; uint64_t targetTime = {}; uint32_t presentStageCount = {}; PresentStageTimeEXT * pPresentStages = {}; TimeDomainKHR timeDomain = TimeDomainKHR::eDevice; uint64_t timeDomainId = {}; Bool32 reportComplete = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PastPresentationTimingEXT; }; #endif template <> struct CppType { using Type = PastPresentationTimingEXT; }; // wrapper struct for struct VkPastPresentationTimingGOOGLE, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingGOOGLE.html struct PastPresentationTimingGOOGLE { using NativeType = VkPastPresentationTimingGOOGLE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT : presentID{ presentID_ } , desiredPresentTime{ desiredPresentTime_ } , actualPresentTime{ actualPresentTime_ } , earliestPresentTime{ earliestPresentTime_ } , presentMargin{ presentMargin_ } { } VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT : PastPresentationTimingGOOGLE( *reinterpret_cast( &rhs ) ) { } PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPastPresentationTimingGOOGLE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingGOOGLE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPastPresentationTimingGOOGLE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( presentID, desiredPresentTime, actualPresentTime, earliestPresentTime, presentMargin ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PastPresentationTimingGOOGLE const & ) const = default; #else bool operator==( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( presentID == rhs.presentID ) && ( desiredPresentTime == rhs.desiredPresentTime ) && ( actualPresentTime == rhs.actualPresentTime ) && ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin ); # endif } bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t presentID = {}; uint64_t desiredPresentTime = {}; uint64_t actualPresentTime = {}; uint64_t earliestPresentTime = {}; uint64_t presentMargin = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PastPresentationTimingGOOGLE; }; #endif // wrapper struct for struct VkPastPresentationTimingInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingInfoEXT.html struct PastPresentationTimingInfoEXT { using NativeType = VkPastPresentationTimingInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePastPresentationTimingInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PastPresentationTimingInfoEXT( PastPresentationTimingFlagsEXT flags_ = {}, SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , swapchain{ swapchain_ } { } VULKAN_HPP_CONSTEXPR PastPresentationTimingInfoEXT( PastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingInfoEXT( VkPastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PastPresentationTimingInfoEXT( *reinterpret_cast( &rhs ) ) { } PastPresentationTimingInfoEXT & operator=( PastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PastPresentationTimingInfoEXT & operator=( VkPastPresentationTimingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setFlags( PastPresentationTimingFlagsEXT flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT && setFlags( PastPresentationTimingFlagsEXT flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT & setSwapchain( SwapchainKHR swapchain_ ) & VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingInfoEXT && setSwapchain( SwapchainKHR swapchain_ ) && VULKAN_HPP_NOEXCEPT { swapchain = swapchain_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPastPresentationTimingInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPastPresentationTimingInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, swapchain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PastPresentationTimingInfoEXT const & ) const = default; #else bool operator==( PastPresentationTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( swapchain == rhs.swapchain ); # endif } bool operator!=( PastPresentationTimingInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePastPresentationTimingInfoEXT; const void * pNext = {}; PastPresentationTimingFlagsEXT flags = {}; SwapchainKHR swapchain = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PastPresentationTimingInfoEXT; }; #endif template <> 
struct CppType { using Type = PastPresentationTimingInfoEXT; }; // wrapper struct for struct VkPastPresentationTimingPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPastPresentationTimingPropertiesEXT.html struct PastPresentationTimingPropertiesEXT { using NativeType = VkPastPresentationTimingPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePastPresentationTimingPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PastPresentationTimingPropertiesEXT( uint64_t timingPropertiesCounter_ = {}, uint64_t timeDomainsCounter_ = {}, uint32_t presentationTimingCount_ = {}, PastPresentationTimingEXT * pPresentationTimings_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , timingPropertiesCounter{ timingPropertiesCounter_ } , timeDomainsCounter{ timeDomainsCounter_ } , presentationTimingCount{ presentationTimingCount_ } , pPresentationTimings{ pPresentationTimings_ } { } VULKAN_HPP_CONSTEXPR PastPresentationTimingPropertiesEXT( PastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PastPresentationTimingPropertiesEXT( VkPastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PastPresentationTimingPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PastPresentationTimingPropertiesEXT & operator=( PastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PastPresentationTimingPropertiesEXT & operator=( VkPastPresentationTimingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPastPresentationTimingPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPastPresentationTimingPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPastPresentationTimingPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPastPresentationTimingPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, timingPropertiesCounter, timeDomainsCounter, presentationTimingCount, pPresentationTimings ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PastPresentationTimingPropertiesEXT const & ) const = default; #else bool operator==( PastPresentationTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( timingPropertiesCounter == rhs.timingPropertiesCounter ) && ( timeDomainsCounter == rhs.timeDomainsCounter ) && ( presentationTimingCount == rhs.presentationTimingCount ) && ( pPresentationTimings == rhs.pPresentationTimings ); # endif } bool operator!=( PastPresentationTimingPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePastPresentationTimingPropertiesEXT; void * pNext = {}; uint64_t timingPropertiesCounter = {}; uint64_t timeDomainsCounter = {}; uint32_t presentationTimingCount = {}; PastPresentationTimingEXT * pPresentationTimings = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PastPresentationTimingPropertiesEXT; }; #endif template <> struct CppType { using Type = PastPresentationTimingPropertiesEXT; }; // wrapper struct for struct VkPerTileBeginInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileBeginInfoQCOM.html struct PerTileBeginInfoQCOM { using NativeType = VkPerTileBeginInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePerTileBeginInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR PerTileBeginInfoQCOM( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerTileBeginInfoQCOM( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PerTileBeginInfoQCOM( *reinterpret_cast( &rhs ) ) { } PerTileBeginInfoQCOM & operator=( PerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerTileBeginInfoQCOM & operator=( VkPerTileBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerTileBeginInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerTileBeginInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerTileBeginInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerTileBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerTileBeginInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerTileBeginInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerTileBeginInfoQCOM const & ) const = default; #else bool operator==( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( PerTileBeginInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerTileBeginInfoQCOM; const void * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerTileBeginInfoQCOM; }; #endif template <> struct CppType { using Type = PerTileBeginInfoQCOM; }; // wrapper struct for struct VkPerTileEndInfoQCOM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerTileEndInfoQCOM.html struct PerTileEndInfoQCOM { using NativeType = VkPerTileEndInfoQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerTileEndInfoQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR PerTileEndInfoQCOM( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerTileEndInfoQCOM( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PerTileEndInfoQCOM( *reinterpret_cast( &rhs ) ) {} PerTileEndInfoQCOM & operator=( PerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerTileEndInfoQCOM & operator=( VkPerTileEndInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerTileEndInfoQCOM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerTileEndInfoQCOM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerTileEndInfoQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( 
this ); } operator VkPerTileEndInfoQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerTileEndInfoQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerTileEndInfoQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerTileEndInfoQCOM const & ) const = default; #else bool operator==( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); # endif } bool operator!=( PerTileEndInfoQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerTileEndInfoQCOM; const void * pNext = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerTileEndInfoQCOM; }; #endif template <> struct CppType { using Type = PerTileEndInfoQCOM; }; // wrapper struct for struct VkPerformanceConfigurationAcquireInfoINTEL, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceConfigurationAcquireInfoINTEL.html struct PerformanceConfigurationAcquireInfoINTEL { using NativeType = VkPerformanceConfigurationAcquireInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationTypeINTEL type_ = PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } { } 
VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) { } PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & setType( PerformanceConfigurationTypeINTEL type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL && setType( PerformanceConfigurationTypeINTEL type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceConfigurationAcquireInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceConfigurationAcquireInfoINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceConfigurationAcquireInfoINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const & ) const = default; #else bool operator==( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); # endif } bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL; const void * pNext = {}; PerformanceConfigurationTypeINTEL type = PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceConfigurationAcquireInfoINTEL; }; #endif template <> struct CppType { using Type = PerformanceConfigurationAcquireInfoINTEL; }; // wrapper struct for struct VkPerformanceCounterARM, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterARM.html struct PerformanceCounterARM { using NativeType = VkPerformanceCounterARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceCounterARM( uint32_t counterID_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , counterID{ counterID_ } { } VULKAN_HPP_CONSTEXPR PerformanceCounterARM( PerformanceCounterARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterARM( VkPerformanceCounterARM const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceCounterARM( *reinterpret_cast( &rhs ) ) { } 
PerformanceCounterARM & operator=( PerformanceCounterARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceCounterARM & operator=( VkPerformanceCounterARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPerformanceCounterARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceCounterARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, counterID ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceCounterARM const & ) const = default; #else bool operator==( PerformanceCounterARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterID == rhs.counterID ); # endif } bool operator!=( PerformanceCounterARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceCounterARM; void * pNext = {}; uint32_t counterID = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceCounterARM; }; #endif template <> struct CppType { using Type = PerformanceCounterARM; }; // wrapper struct for struct VkPerformanceCounterDescriptionARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionARM.html struct PerformanceCounterDescriptionARM { using NativeType = VkPerformanceCounterDescriptionARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePerformanceCounterDescriptionARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionARM( PerformanceCounterDescriptionFlagsARM flags_ = {}, std::array const & name_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , name{ name_ } { } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionARM( PerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterDescriptionARM( VkPerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceCounterDescriptionARM( *reinterpret_cast( &rhs ) ) { } PerformanceCounterDescriptionARM & operator=( PerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceCounterDescriptionARM & operator=( VkPerformanceCounterDescriptionARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPerformanceCounterDescriptionARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, name ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = strcmp( 
name, rhs.name ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ); } bool operator!=( PerformanceCounterDescriptionARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::ePerformanceCounterDescriptionARM; void * pNext = {}; PerformanceCounterDescriptionFlagsARM flags = {}; ArrayWrapper1D name = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceCounterDescriptionARM; }; #endif template <> struct CppType { using Type = PerformanceCounterDescriptionARM; }; // wrapper struct for struct VkPerformanceCounterDescriptionKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterDescriptionKHR.html struct PerformanceCounterDescriptionKHR { using NativeType = VkPerformanceCounterDescriptionKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array const & name_ = {}, std::array const & category_ = {}, std::array const & description_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , name{ name_ } , category{ category_ } , description{ description_ } { } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : 
PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) { } PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPerformanceCounterDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceCounterDescriptionKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, name, category, description ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) return cmp; if ( auto cmp = strcmp( name, rhs.name ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = strcmp( category, rhs.category ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = strcmp( description, rhs.description ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; return std::strong_ordering::equivalent; } #endif bool operator==( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); } bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::ePerformanceCounterDescriptionKHR; void * pNext = {}; PerformanceCounterDescriptionFlagsKHR flags = {}; ArrayWrapper1D name = {}; ArrayWrapper1D category = {}; ArrayWrapper1D description = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceCounterDescriptionKHR; }; #endif template <> struct CppType { using Type = PerformanceCounterDescriptionKHR; }; // wrapper struct for struct VkPerformanceCounterKHR, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceCounterKHR.html struct PerformanceCounterKHR { using NativeType = VkPerformanceCounterKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterUnitKHR unit_ = PerformanceCounterUnitKHR::eGeneric, PerformanceCounterScopeKHR scope_ = PerformanceCounterScopeKHR::eCommandBuffer, PerformanceCounterStorageKHR storage_ = PerformanceCounterStorageKHR::eInt32, std::array const & uuid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , unit{ unit_ } , scope{ scope_ } , storage{ storage_ } , uuid{ uuid_ } { } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) { } PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPerformanceCounterKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceCounterKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceCounterKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, unit, scope, storage, uuid ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceCounterKHR const & ) const = default; #else bool operator==( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( unit == rhs.unit ) && ( scope == rhs.scope ) && ( storage == rhs.storage ) && ( uuid == rhs.uuid ); # endif } bool operator!=( PerformanceCounterKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceCounterKHR; void * pNext = {}; PerformanceCounterUnitKHR unit = PerformanceCounterUnitKHR::eGeneric; PerformanceCounterScopeKHR scope = PerformanceCounterScopeKHR::eCommandBuffer; PerformanceCounterStorageKHR storage = PerformanceCounterStorageKHR::eInt32; ArrayWrapper1D uuid = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PerformanceCounterKHR; }; #endif template <> struct CppType { using Type = PerformanceCounterKHR; }; union PerformanceCounterResultKHR { using NativeType = VkPerformanceCounterResultKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int32_t int32_ = {} ) : int32( int32_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( int64_t int64_ ) : int64( int64_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint32_t uint32_ ) : uint32( uint32_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( uint64_t uint64_ ) : uint64( uint64_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( float float32_ ) : float32( float32_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR( double float64_ ) : float64( float64_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt32( int32_t int32_ ) & VULKAN_HPP_NOEXCEPT { int32 = int32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setInt32( int32_t int32_ ) && VULKAN_HPP_NOEXCEPT { int32 = int32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setInt64( int64_t int64_ ) & VULKAN_HPP_NOEXCEPT { int64 = int64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setInt64( int64_t int64_ ) && VULKAN_HPP_NOEXCEPT { int64 = int64_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) & VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setUint32( uint32_t uint32_ ) && VULKAN_HPP_NOEXCEPT { uint32 = uint32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) & VULKAN_HPP_NOEXCEPT { uint64 = uint64_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setUint64( uint64_t uint64_ ) && VULKAN_HPP_NOEXCEPT { uint64 = uint64_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat32( float float32_ ) & VULKAN_HPP_NOEXCEPT { float32 = float32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setFloat32( float float32_ ) && VULKAN_HPP_NOEXCEPT { float32 = float32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR & setFloat64( double float64_ ) & VULKAN_HPP_NOEXCEPT { float64 = float64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterResultKHR && setFloat64( double float64_ ) && VULKAN_HPP_NOEXCEPT { float64 = float64_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceCounterResultKHR const &() const { return *reinterpret_cast( this ); } operator VkPerformanceCounterResultKHR &() { return *reinterpret_cast( this ); } int32_t int32; int64_t int64; uint32_t uint32; uint64_t uint64; float float32; double float64; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceCounterResultKHR; }; #endif // wrapper struct for struct VkPerformanceMarkerInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceMarkerInfoINTEL.html struct PerformanceMarkerInfoINTEL { using NativeType = VkPerformanceMarkerInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , marker{ marker_ } { } VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceMarkerInfoINTEL( 
VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) { } PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) & VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL && setMarker( uint64_t marker_ ) && VULKAN_HPP_NOEXCEPT { marker = marker_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, marker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; #else bool operator==( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); # endif } bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceMarkerInfoINTEL; const void * pNext = {}; uint64_t marker = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceMarkerInfoINTEL; }; #endif template <> struct CppType { using Type = PerformanceMarkerInfoINTEL; }; // wrapper struct for struct VkPerformanceOverrideInfoINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceOverrideInfoINTEL.html struct PerformanceOverrideInfoINTEL { using NativeType = VkPerformanceOverrideInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideTypeINTEL type_ = PerformanceOverrideTypeINTEL::eNullHardware, Bool32 enable_ = {}, uint64_t parameter_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , type{ type_ } , enable{ enable_ } , parameter{ parameter_ } { } VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceOverrideInfoINTEL( *reinterpret_cast( &rhs ) ) { } PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setType( PerformanceOverrideTypeINTEL type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL && setType( PerformanceOverrideTypeINTEL type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setEnable( Bool32 enable_ ) & VULKAN_HPP_NOEXCEPT { enable = enable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL && setEnable( Bool32 enable_ ) && VULKAN_HPP_NOEXCEPT { enable = enable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) & VULKAN_HPP_NOEXCEPT { parameter = parameter_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL && setParameter( uint64_t parameter_ ) && VULKAN_HPP_NOEXCEPT { parameter = parameter_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceOverrideInfoINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceOverrideInfoINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, type, enable, parameter ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceOverrideInfoINTEL const & 
) const = default; #else bool operator==( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ) && ( enable == rhs.enable ) && ( parameter == rhs.parameter ); # endif } bool operator!=( PerformanceOverrideInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceOverrideInfoINTEL; const void * pNext = {}; PerformanceOverrideTypeINTEL type = PerformanceOverrideTypeINTEL::eNullHardware; Bool32 enable = {}; uint64_t parameter = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceOverrideInfoINTEL; }; #endif template <> struct CppType { using Type = PerformanceOverrideInfoINTEL; }; // wrapper struct for struct VkPerformanceQuerySubmitInfoKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceQuerySubmitInfoKHR.html struct PerformanceQuerySubmitInfoKHR { using NativeType = VkPerformanceQuerySubmitInfoKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , counterPassIndex{ counterPassIndex_ } { } VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) { } PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) & VULKAN_HPP_NOEXCEPT { counterPassIndex = counterPassIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR && setCounterPassIndex( uint32_t counterPassIndex_ ) && VULKAN_HPP_NOEXCEPT { counterPassIndex = counterPassIndex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceQuerySubmitInfoKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceQuerySubmitInfoKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, counterPassIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; #else bool operator==( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( counterPassIndex == rhs.counterPassIndex 
); # endif } bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR; const void * pNext = {}; uint32_t counterPassIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceQuerySubmitInfoKHR; }; #endif template <> struct CppType { using Type = PerformanceQuerySubmitInfoKHR; }; // wrapper struct for struct VkPerformanceStreamMarkerInfoINTEL, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceStreamMarkerInfoINTEL.html struct PerformanceStreamMarkerInfoINTEL { using NativeType = VkPerformanceStreamMarkerInfoINTEL; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , marker{ marker_ } { } VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast( &rhs ) ) { } PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) & VULKAN_HPP_NOEXCEPT { marker = marker_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL && setMarker( uint32_t marker_ ) && VULKAN_HPP_NOEXCEPT { marker = marker_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceStreamMarkerInfoINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceStreamMarkerInfoINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, marker ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceStreamMarkerInfoINTEL const & ) const = default; #else bool operator==( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( marker == rhs.marker ); # endif } bool operator!=( PerformanceStreamMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL; const void * pNext = {}; uint32_t marker = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceStreamMarkerInfoINTEL; }; #endif template <> struct CppType { using Type = PerformanceStreamMarkerInfoINTEL; }; union 
PerformanceValueDataINTEL { using NativeType = VkPerformanceValueDataINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint32_t value32_ = {} ) : value32( value32_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( uint64_t value64_ ) : value64( value64_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( float valueFloat_ ) : valueFloat( valueFloat_ ) {} VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL( const char * valueString_ ) : valueString( valueString_ ) {} #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_UNION_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) & VULKAN_HPP_NOEXCEPT { value32 = value32_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL && setValue32( uint32_t value32_ ) && VULKAN_HPP_NOEXCEPT { value32 = value32_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) & VULKAN_HPP_NOEXCEPT { value64 = value64_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL && setValue64( uint64_t value64_ ) && VULKAN_HPP_NOEXCEPT { value64 = value64_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) & VULKAN_HPP_NOEXCEPT { valueFloat = valueFloat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL && setValueFloat( float valueFloat_ ) && VULKAN_HPP_NOEXCEPT { valueFloat = valueFloat_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & setValueBool( Bool32 valueBool_ ) & VULKAN_HPP_NOEXCEPT { valueBool = valueBool_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL && setValueBool( Bool32 valueBool_ ) && VULKAN_HPP_NOEXCEPT { valueBool = valueBool_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL & 
setValueString( const char * valueString_ ) & VULKAN_HPP_NOEXCEPT { valueString = valueString_; return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceValueDataINTEL && setValueString( const char * valueString_ ) && VULKAN_HPP_NOEXCEPT { valueString = valueString_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPerformanceValueDataINTEL const &() const { return *reinterpret_cast( this ); } operator VkPerformanceValueDataINTEL &() { return *reinterpret_cast( this ); } #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS uint32_t value32; uint64_t value64; float valueFloat; Bool32 valueBool; const char * valueString; #else uint32_t value32; uint64_t value64; float valueFloat; VkBool32 valueBool; const char * valueString; #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceValueDataINTEL; }; #endif // wrapper struct for struct VkPerformanceValueINTEL, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPerformanceValueINTEL.html struct PerformanceValueINTEL { using NativeType = VkPerformanceValueINTEL; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueTypeINTEL type_ = PerformanceValueTypeINTEL::eUint32, PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT : type{ type_ } , data{ data_ } { } VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceValueINTEL( *reinterpret_cast( &rhs ) ) { } PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPerformanceValueINTEL 
const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPerformanceValueINTEL const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPerformanceValueINTEL *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( type, data ); } #endif public: PerformanceValueTypeINTEL type = PerformanceValueTypeINTEL::eUint32; PerformanceValueDataINTEL data = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PerformanceValueINTEL; }; #endif // wrapper struct for struct VkPhysicalDevice16BitStorageFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice16BitStorageFeatures.html struct PhysicalDevice16BitStorageFeatures { using NativeType = VkPhysicalDevice16BitStorageFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( Bool32 storageBuffer16BitAccess_ = {}, Bool32 uniformAndStorageBuffer16BitAccess_ = {}, Bool32 storagePushConstant16_ = {}, Bool32 storageInputOutput16_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } , storagePushConstant16{ storagePushConstant16_ } , storageInputOutput16{ storageInputOutput16_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT : PhysicalDevice16BitStorageFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) & VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures && setStorageBuffer16BitAccess( Bool32 storageBuffer16BitAccess_ ) && VULKAN_HPP_NOEXCEPT { storageBuffer16BitAccess = storageBuffer16BitAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) & VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures && setUniformAndStorageBuffer16BitAccess( Bool32 uniformAndStorageBuffer16BitAccess_ ) && VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( Bool32 storagePushConstant16_ ) & VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures && setStoragePushConstant16( Bool32 storagePushConstant16_ ) && VULKAN_HPP_NOEXCEPT { storagePushConstant16 = storagePushConstant16_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( Bool32 storageInputOutput16_ ) & VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures && setStorageInputOutput16( Bool32 storageInputOutput16_ ) && VULKAN_HPP_NOEXCEPT { storageInputOutput16 = storageInputOutput16_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevice16BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice16BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevice16BitStorageFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, storageBuffer16BitAccess, uniformAndStorageBuffer16BitAccess, storagePushConstant16, storageInputOutput16 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevice16BitStorageFeatures const & ) const = default; #else bool operator==( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess ) && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess ) && ( storagePushConstant16 == rhs.storagePushConstant16 ) && ( storageInputOutput16 == 
rhs.storageInputOutput16 ); # endif } bool operator!=( PhysicalDevice16BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures; void * pNext = {}; Bool32 storageBuffer16BitAccess = {}; Bool32 uniformAndStorageBuffer16BitAccess = {}; Bool32 storagePushConstant16 = {}; Bool32 storageInputOutput16 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevice16BitStorageFeatures; }; #endif template <> struct CppType { using Type = PhysicalDevice16BitStorageFeatures; }; using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; // wrapper struct for struct VkPhysicalDevice4444FormatsFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice4444FormatsFeaturesEXT.html struct PhysicalDevice4444FormatsFeaturesEXT { using NativeType = VkPhysicalDevice4444FormatsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( Bool32 formatA4R4G4B4_ = {}, Bool32 formatA4B4G4R4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , formatA4R4G4B4{ formatA4R4G4B4_ } , formatA4B4G4R4{ formatA4B4G4R4_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( Bool32 formatA4R4G4B4_ ) & VULKAN_HPP_NOEXCEPT { formatA4R4G4B4 = formatA4R4G4B4_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT && setFormatA4R4G4B4( Bool32 formatA4R4G4B4_ ) && VULKAN_HPP_NOEXCEPT { formatA4R4G4B4 = formatA4R4G4B4_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( Bool32 formatA4B4G4R4_ ) & VULKAN_HPP_NOEXCEPT { formatA4B4G4R4 = formatA4B4G4R4_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT && setFormatA4B4G4R4( Bool32 formatA4B4G4R4_ ) && VULKAN_HPP_NOEXCEPT { formatA4B4G4R4 = formatA4B4G4R4_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevice4444FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice4444FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevice4444FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, formatA4R4G4B4, formatA4B4G4R4 ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 ) && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 ); # endif } bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT; void * pNext = {}; Bool32 formatA4R4G4B4 = {}; Bool32 formatA4B4G4R4 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevice4444FormatsFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevice4444FormatsFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevice8BitStorageFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevice8BitStorageFeatures.html struct PhysicalDevice8BitStorageFeatures { using NativeType = VkPhysicalDevice8BitStorageFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( Bool32 storageBuffer8BitAccess_ = {}, Bool32 uniformAndStorageBuffer8BitAccess_ = {}, Bool32 storagePushConstant8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } , storagePushConstant8{ storagePushConstant8_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevice8BitStorageFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ ) & VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures && setStorageBuffer8BitAccess( Bool32 storageBuffer8BitAccess_ ) && VULKAN_HPP_NOEXCEPT { storageBuffer8BitAccess = storageBuffer8BitAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ ) & VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures && setUniformAndStorageBuffer8BitAccess( Bool32 uniformAndStorageBuffer8BitAccess_ ) && VULKAN_HPP_NOEXCEPT { uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( Bool32 
storagePushConstant8_ ) & VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures && setStoragePushConstant8( Bool32 storagePushConstant8_ ) && VULKAN_HPP_NOEXCEPT { storagePushConstant8 = storagePushConstant8_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevice8BitStorageFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevice8BitStorageFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, storageBuffer8BitAccess, uniformAndStorageBuffer8BitAccess, storagePushConstant8 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevice8BitStorageFeatures const & ) const = default; #else bool operator==( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess ) && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess ) && ( storagePushConstant8 == rhs.storagePushConstant8 ); # endif } bool operator!=( PhysicalDevice8BitStorageFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures; void * pNext = {}; Bool32 storageBuffer8BitAccess = {}; Bool32 uniformAndStorageBuffer8BitAccess = {}; Bool32 storagePushConstant8 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> 
struct CppType { using Type = PhysicalDevice8BitStorageFeatures; }; #endif template <> struct CppType { using Type = PhysicalDevice8BitStorageFeatures; }; using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures; // wrapper struct for struct VkPhysicalDeviceASTCDecodeFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html struct PhysicalDeviceASTCDecodeFeaturesEXT { using NativeType = VkPhysicalDeviceASTCDecodeFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( Bool32 decodeModeSharedExponent_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , decodeModeSharedExponent{ decodeModeSharedExponent_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( Bool32 decodeModeSharedExponent_ ) & VULKAN_HPP_NOEXCEPT { decodeModeSharedExponent = decodeModeSharedExponent_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT && setDecodeModeSharedExponent( Bool32 decodeModeSharedExponent_ ) && VULKAN_HPP_NOEXCEPT { decodeModeSharedExponent = decodeModeSharedExponent_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceASTCDecodeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceASTCDecodeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceASTCDecodeFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, decodeModeSharedExponent ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent ); # endif } bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT; void * pNext = {}; Bool32 decodeModeSharedExponent = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceASTCDecodeFeaturesEXT; }; #endif template <> 
struct CppType { using Type = PhysicalDeviceASTCDecodeFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceAccelerationStructureFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructureFeaturesKHR.html struct PhysicalDeviceAccelerationStructureFeaturesKHR { using NativeType = VkPhysicalDeviceAccelerationStructureFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( Bool32 accelerationStructure_ = {}, Bool32 accelerationStructureCaptureReplay_ = {}, Bool32 accelerationStructureIndirectBuild_ = {}, Bool32 accelerationStructureHostCommands_ = {}, Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , accelerationStructure{ accelerationStructure_ } , accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ } , accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ } , accelerationStructureHostCommands{ accelerationStructureHostCommands_ } , descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( Bool32 accelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setAccelerationStructure( Bool32 accelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructure = accelerationStructure_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( Bool32 accelerationStructureCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setAccelerationStructureCaptureReplay( Bool32 accelerationStructureCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( Bool32 accelerationStructureIndirectBuild_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setAccelerationStructureIndirectBuild( 
Bool32 accelerationStructureIndirectBuild_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( Bool32 accelerationStructureHostCommands_ ) & VULKAN_HPP_NOEXCEPT { accelerationStructureHostCommands = accelerationStructureHostCommands_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setAccelerationStructureHostCommands( Bool32 accelerationStructureHostCommands_ ) && VULKAN_HPP_NOEXCEPT { accelerationStructureHostCommands = accelerationStructureHostCommands_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR && setDescriptorBindingAccelerationStructureUpdateAfterBind( Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructureFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, accelerationStructure, accelerationStructureCaptureReplay, accelerationStructureIndirectBuild, accelerationStructureHostCommands, descriptorBindingAccelerationStructureUpdateAfterBind ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( accelerationStructure == rhs.accelerationStructure ) && ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay ) && ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild ) && ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands ) && ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind ); # endif } bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR; void * pNext = {}; Bool32 accelerationStructure = {}; Bool32 accelerationStructureCaptureReplay = {}; Bool32 accelerationStructureIndirectBuild = {}; Bool32 accelerationStructureHostCommands = {}; Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceAccelerationStructureFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceAccelerationStructurePropertiesKHR, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAccelerationStructurePropertiesKHR.html struct PhysicalDeviceAccelerationStructurePropertiesKHR { using NativeType = VkPhysicalDeviceAccelerationStructurePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxGeometryCount{ maxGeometryCount_ } , maxInstanceCount{ maxInstanceCount_ } , maxPrimitiveCount{ maxPrimitiveCount_ } , maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ } , maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ } , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } , maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ } , minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAccelerationStructurePropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxGeometryCount, maxInstanceCount, maxPrimitiveCount, maxPerStageDescriptorAccelerationStructures, maxPerStageDescriptorUpdateAfterBindAccelerationStructures, maxDescriptorSetAccelerationStructures, maxDescriptorSetUpdateAfterBindAccelerationStructures, minAccelerationStructureScratchOffsetAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGeometryCount == rhs.maxGeometryCount ) && ( maxInstanceCount == rhs.maxInstanceCount ) && ( maxPrimitiveCount == rhs.maxPrimitiveCount ) && ( 
maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures ) && ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures ) && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures ) && ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures ) && ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment ); # endif } bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR; void * pNext = {}; uint64_t maxGeometryCount = {}; uint64_t maxInstanceCount = {}; uint64_t maxPrimitiveCount = {}; uint32_t maxPerStageDescriptorAccelerationStructures = {}; uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {}; uint32_t maxDescriptorSetAccelerationStructures = {}; uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {}; uint32_t minAccelerationStructureScratchOffsetAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceAccelerationStructurePropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceAddressBindingReportFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAddressBindingReportFeaturesEXT.html struct PhysicalDeviceAddressBindingReportFeaturesEXT { using NativeType = VkPhysicalDeviceAddressBindingReportFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , reportAddressBinding{ reportAddressBinding_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAddressBindingReportFeaturesEXT( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAddressBindingReportFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAddressBindingReportFeaturesEXT & operator=( VkPhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT & setReportAddressBinding( Bool32 reportAddressBinding_ ) & VULKAN_HPP_NOEXCEPT { reportAddressBinding = reportAddressBinding_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAddressBindingReportFeaturesEXT && setReportAddressBinding( Bool32 reportAddressBinding_ ) && VULKAN_HPP_NOEXCEPT { reportAddressBinding = reportAddressBinding_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const &() 
const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAddressBindingReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAddressBindingReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAddressBindingReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, reportAddressBinding ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAddressBindingReportFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( reportAddressBinding == rhs.reportAddressBinding ); # endif } bool operator!=( PhysicalDeviceAddressBindingReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAddressBindingReportFeaturesEXT; void * pNext = {}; Bool32 reportAddressBinding = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAddressBindingReportFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceAddressBindingReportFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceAmigoProfilingFeaturesSEC, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAmigoProfilingFeaturesSEC.html struct PhysicalDeviceAmigoProfilingFeaturesSEC { using NativeType = VkPhysicalDeviceAmigoProfilingFeaturesSEC; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , amigoProfiling{ amigoProfiling_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAmigoProfilingFeaturesSEC( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAmigoProfilingFeaturesSEC( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAmigoProfilingFeaturesSEC & operator=( VkPhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC & setAmigoProfiling( Bool32 amigoProfiling_ ) & VULKAN_HPP_NOEXCEPT { amigoProfiling = amigoProfiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAmigoProfilingFeaturesSEC && setAmigoProfiling( Bool32 amigoProfiling_ ) && VULKAN_HPP_NOEXCEPT { amigoProfiling = amigoProfiling_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceAmigoProfilingFeaturesSEC &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAmigoProfilingFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAmigoProfilingFeaturesSEC *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, amigoProfiling ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAmigoProfilingFeaturesSEC const & ) const = default; #else bool operator==( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( amigoProfiling == rhs.amigoProfiling ); # endif } bool operator!=( PhysicalDeviceAmigoProfilingFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC; void * pNext = {}; Bool32 amigoProfiling = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; }; #endif template <> struct CppType { using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; }; // wrapper struct for struct VkPhysicalDeviceAntiLagFeaturesAMD, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAntiLagFeaturesAMD.html struct PhysicalDeviceAntiLagFeaturesAMD { using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( Bool32 antiLag_ = 
{}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , antiLag{ antiLag_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( Bool32 antiLag_ ) & VULKAN_HPP_NOEXCEPT { antiLag = antiLag_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD && setAntiLag( Bool32 antiLag_ ) && VULKAN_HPP_NOEXCEPT { antiLag = antiLag_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAntiLagFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAntiLagFeaturesAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie( sType, pNext, antiLag ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default; #else bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( antiLag == rhs.antiLag ); # endif } bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; void * pNext = {}; Bool32 antiLag = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAntiLagFeaturesAMD; }; #endif template <> struct CppType { using Type = PhysicalDeviceAntiLagFeaturesAMD; }; // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT.html struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( Bool32 attachmentFeedbackLoopDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT & setAttachmentFeedbackLoopDynamicState( Bool32 attachmentFeedbackLoopDynamicState_ ) & VULKAN_HPP_NOEXCEPT { attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT && setAttachmentFeedbackLoopDynamicState( Bool32 attachmentFeedbackLoopDynamicState_ ) && VULKAN_HPP_NOEXCEPT { attachmentFeedbackLoopDynamicState = attachmentFeedbackLoopDynamicState_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, attachmentFeedbackLoopDynamicState ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopDynamicState == rhs.attachmentFeedbackLoopDynamicState ); # endif } bool operator!=( PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; void * pNext = {}; Bool32 attachmentFeedbackLoopDynamicState = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.html struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( Bool32 attachmentFeedbackLoopLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & operator=( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT & setAttachmentFeedbackLoopLayout( Bool32 attachmentFeedbackLoopLayout_ ) & VULKAN_HPP_NOEXCEPT { attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT && setAttachmentFeedbackLoopLayout( Bool32 attachmentFeedbackLoopLayout_ ) && VULKAN_HPP_NOEXCEPT { attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, attachmentFeedbackLoopLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( attachmentFeedbackLoopLayout == rhs.attachmentFeedbackLoopLayout ); # endif } bool operator!=( PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; void * pNext = {}; Bool32 attachmentFeedbackLoopLayout = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT { using NativeType = VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( Bool32 advancedBlendCoherentOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , advancedBlendCoherentOperations{ advancedBlendCoherentOperations_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT && setPNext( 
void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ ) & VULKAN_HPP_NOEXCEPT { advancedBlendCoherentOperations = advancedBlendCoherentOperations_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT && setAdvancedBlendCoherentOperations( Bool32 advancedBlendCoherentOperations_ ) && VULKAN_HPP_NOEXCEPT { advancedBlendCoherentOperations = advancedBlendCoherentOperations_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, advancedBlendCoherentOperations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations ); # endif } bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT; void * pNext = {}; Bool32 advancedBlendCoherentOperations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT { using NativeType = VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {}, Bool32 advancedBlendIndependentBlend_ = {}, Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, Bool32 advancedBlendNonPremultipliedDstColor_ = {}, Bool32 advancedBlendCorrelatedOverlap_ = {}, Bool32 advancedBlendAllOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , advancedBlendMaxColorAttachments{ advancedBlendMaxColorAttachments_ } , advancedBlendIndependentBlend{ advancedBlendIndependentBlend_ } , advancedBlendNonPremultipliedSrcColor{ advancedBlendNonPremultipliedSrcColor_ } , advancedBlendNonPremultipliedDstColor{ advancedBlendNonPremultipliedDstColor_ } , advancedBlendCorrelatedOverlap{ advancedBlendCorrelatedOverlap_ } , advancedBlendAllOperations{ advancedBlendAllOperations_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, advancedBlendMaxColorAttachments, advancedBlendIndependentBlend, advancedBlendNonPremultipliedSrcColor, advancedBlendNonPremultipliedDstColor, advancedBlendCorrelatedOverlap, advancedBlendAllOperations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments ) && ( advancedBlendIndependentBlend == 
rhs.advancedBlendIndependentBlend ) && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor ) && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor ) && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap ) && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations ); # endif } bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT; void * pNext = {}; uint32_t advancedBlendMaxColorAttachments = {}; Bool32 advancedBlendIndependentBlend = {}; Bool32 advancedBlendNonPremultipliedSrcColor = {}; Bool32 advancedBlendNonPremultipliedDstColor = {}; Bool32 advancedBlendCorrelatedOverlap = {}; Bool32 advancedBlendAllOperations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBorderColorSwizzleFeaturesEXT.html struct PhysicalDeviceBorderColorSwizzleFeaturesEXT { using NativeType = VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( Bool32 borderColorSwizzle_ = {}, Bool32 borderColorSwizzleFromImage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , borderColorSwizzle{ borderColorSwizzle_ } , borderColorSwizzleFromImage{ 
borderColorSwizzleFromImage_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBorderColorSwizzleFeaturesEXT( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBorderColorSwizzleFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceBorderColorSwizzleFeaturesEXT & operator=( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzle( Bool32 borderColorSwizzle_ ) & VULKAN_HPP_NOEXCEPT { borderColorSwizzle = borderColorSwizzle_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT && setBorderColorSwizzle( Bool32 borderColorSwizzle_ ) && VULKAN_HPP_NOEXCEPT { borderColorSwizzle = borderColorSwizzle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT & setBorderColorSwizzleFromImage( Bool32 borderColorSwizzleFromImage_ ) & VULKAN_HPP_NOEXCEPT { borderColorSwizzleFromImage = borderColorSwizzleFromImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBorderColorSwizzleFeaturesEXT && setBorderColorSwizzleFromImage( Bool32 borderColorSwizzleFromImage_ ) && VULKAN_HPP_NOEXCEPT { 
borderColorSwizzleFromImage = borderColorSwizzleFromImage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceBorderColorSwizzleFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, borderColorSwizzle, borderColorSwizzleFromImage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( borderColorSwizzle == rhs.borderColorSwizzle ) && ( borderColorSwizzleFromImage == rhs.borderColorSwizzleFromImage ); # endif } bool operator!=( PhysicalDeviceBorderColorSwizzleFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceBorderColorSwizzleFeaturesEXT; void * pNext = {}; Bool32 borderColorSwizzle = {}; Bool32 borderColorSwizzleFromImage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceBorderColorSwizzleFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeatures, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeatures.html struct PhysicalDeviceBufferDeviceAddressFeatures { using NativeType = VkPhysicalDeviceBufferDeviceAddressFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( Bool32 bufferDeviceAddress_ = {}, Bool32 bufferDeviceAddressCaptureReplay_ = {}, Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , bufferDeviceAddress{ bufferDeviceAddress_ } , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext 
= pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures && setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures && setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures && setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceBufferDeviceAddressFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeatures *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const & ) const = default; #else bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); # endif } bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures; void * pNext = {}; Bool32 bufferDeviceAddress = {}; Bool32 bufferDeviceAddressCaptureReplay = {}; Bool32 bufferDeviceAddressMultiDevice = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeatures; }; using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures; // wrapper struct for struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.html struct PhysicalDeviceBufferDeviceAddressFeaturesEXT { using NativeType = VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( Bool32 bufferDeviceAddress_ = {}, Bool32 bufferDeviceAddressCaptureReplay_ = {}, Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , bufferDeviceAddress{ bufferDeviceAddress_ } , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceBufferDeviceAddressFeaturesEXT && setBufferDeviceAddress( Bool32 bufferDeviceAddress_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddress = bufferDeviceAddress_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT && setBufferDeviceAddressCaptureReplay( Bool32 bufferDeviceAddressCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) & VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT && setBufferDeviceAddressMultiDevice( Bool32 bufferDeviceAddressMultiDevice_ ) && VULKAN_HPP_NOEXCEPT { bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, bufferDeviceAddress, bufferDeviceAddressCaptureReplay, bufferDeviceAddressMultiDevice ); } 
#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( bufferDeviceAddress == rhs.bufferDeviceAddress ) && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay ) && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice ); # endif } bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT; void * pNext = {}; Bool32 bufferDeviceAddress = {}; Bool32 bufferDeviceAddressCaptureReplay = {}; Bool32 bufferDeviceAddressMultiDevice = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT; }; using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructureFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructureFeaturesNV.html struct PhysicalDeviceClusterAccelerationStructureFeaturesNV { using NativeType = VkPhysicalDeviceClusterAccelerationStructureFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceClusterAccelerationStructureFeaturesNV( Bool32 clusterAccelerationStructure_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , clusterAccelerationStructure{ clusterAccelerationStructure_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructureFeaturesNV( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceClusterAccelerationStructureFeaturesNV( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceClusterAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceClusterAccelerationStructureFeaturesNV & operator=( VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV & setClusterAccelerationStructure( Bool32 clusterAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { clusterAccelerationStructure = clusterAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterAccelerationStructureFeaturesNV && setClusterAccelerationStructure( Bool32 clusterAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { clusterAccelerationStructure = clusterAccelerationStructure_; return std::move( *this ); } #endif 
/*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, clusterAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterAccelerationStructure == rhs.clusterAccelerationStructure ); # endif } bool operator!=( PhysicalDeviceClusterAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructureFeaturesNV; void * pNext = {}; Bool32 clusterAccelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceClusterAccelerationStructureFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceClusterAccelerationStructurePropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterAccelerationStructurePropertiesNV.html struct 
PhysicalDeviceClusterAccelerationStructurePropertiesNV { using NativeType = VkPhysicalDeviceClusterAccelerationStructurePropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( uint32_t maxVerticesPerCluster_ = {}, uint32_t maxTrianglesPerCluster_ = {}, uint32_t clusterScratchByteAlignment_ = {}, uint32_t clusterByteAlignment_ = {}, uint32_t clusterTemplateByteAlignment_ = {}, uint32_t clusterBottomLevelByteAlignment_ = {}, uint32_t clusterTemplateBoundsByteAlignment_ = {}, uint32_t maxClusterGeometryIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxVerticesPerCluster{ maxVerticesPerCluster_ } , maxTrianglesPerCluster{ maxTrianglesPerCluster_ } , clusterScratchByteAlignment{ clusterScratchByteAlignment_ } , clusterByteAlignment{ clusterByteAlignment_ } , clusterTemplateByteAlignment{ clusterTemplateByteAlignment_ } , clusterBottomLevelByteAlignment{ clusterBottomLevelByteAlignment_ } , clusterTemplateBoundsByteAlignment{ clusterTemplateBoundsByteAlignment_ } , maxClusterGeometryIndex{ maxClusterGeometryIndex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterAccelerationStructurePropertiesNV( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceClusterAccelerationStructurePropertiesNV( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceClusterAccelerationStructurePropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceClusterAccelerationStructurePropertiesNV & operator=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceClusterAccelerationStructurePropertiesNV & operator=( VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceClusterAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxVerticesPerCluster, maxTrianglesPerCluster, clusterScratchByteAlignment, clusterByteAlignment, clusterTemplateByteAlignment, clusterBottomLevelByteAlignment, clusterTemplateBoundsByteAlignment, maxClusterGeometryIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVerticesPerCluster == rhs.maxVerticesPerCluster ) && ( maxTrianglesPerCluster == rhs.maxTrianglesPerCluster ) && ( clusterScratchByteAlignment == rhs.clusterScratchByteAlignment ) && ( clusterByteAlignment == rhs.clusterByteAlignment ) && ( clusterTemplateByteAlignment == rhs.clusterTemplateByteAlignment ) && ( clusterBottomLevelByteAlignment == rhs.clusterBottomLevelByteAlignment ) && ( clusterTemplateBoundsByteAlignment == rhs.clusterTemplateBoundsByteAlignment ) && ( 
maxClusterGeometryIndex == rhs.maxClusterGeometryIndex ); # endif } bool operator!=( PhysicalDeviceClusterAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceClusterAccelerationStructurePropertiesNV; void * pNext = {}; uint32_t maxVerticesPerCluster = {}; uint32_t maxTrianglesPerCluster = {}; uint32_t clusterScratchByteAlignment = {}; uint32_t clusterByteAlignment = {}; uint32_t clusterTemplateByteAlignment = {}; uint32_t clusterBottomLevelByteAlignment = {}; uint32_t clusterTemplateBoundsByteAlignment = {}; uint32_t maxClusterGeometryIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceClusterAccelerationStructurePropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI.html struct PhysicalDeviceClusterCullingShaderFeaturesHUAWEI { using NativeType = VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( Bool32 clustercullingShader_ = {}, Bool32 multiviewClusterCullingShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , clustercullingShader{ clustercullingShader_ } , multiviewClusterCullingShader{ multiviewClusterCullingShader_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setClustercullingShader( Bool32 clustercullingShader_ ) & VULKAN_HPP_NOEXCEPT { clustercullingShader = clustercullingShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI && setClustercullingShader( Bool32 clustercullingShader_ ) && VULKAN_HPP_NOEXCEPT { clustercullingShader = clustercullingShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI & setMultiviewClusterCullingShader( Bool32 multiviewClusterCullingShader_ ) & VULKAN_HPP_NOEXCEPT { multiviewClusterCullingShader = multiviewClusterCullingShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderFeaturesHUAWEI && setMultiviewClusterCullingShader( Bool32 multiviewClusterCullingShader_ ) && VULKAN_HPP_NOEXCEPT { multiviewClusterCullingShader = multiviewClusterCullingShader_; 
return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, clustercullingShader, multiviewClusterCullingShader ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & ) const = default; #else bool operator==( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clustercullingShader == rhs.clustercullingShader ) && ( multiviewClusterCullingShader == rhs.multiviewClusterCullingShader ); # endif } bool operator!=( PhysicalDeviceClusterCullingShaderFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI; void * pNext = {}; Bool32 clustercullingShader = {}; Bool32 multiviewClusterCullingShader = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; }; #endif template <> struct CppType { using Type = PhysicalDeviceClusterCullingShaderFeaturesHUAWEI; }; // wrapper struct for struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI.html struct PhysicalDeviceClusterCullingShaderPropertiesHUAWEI { using NativeType = VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( std::array const & maxWorkGroupCount_ = {}, std::array const & maxWorkGroupSize_ = {}, uint32_t maxOutputClusterCount_ = {}, DeviceSize indirectBufferOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxWorkGroupCount{ maxWorkGroupCount_ } , maxWorkGroupSize{ maxWorkGroupSize_ } , maxOutputClusterCount{ maxOutputClusterCount_ } , indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceClusterCullingShaderPropertiesHUAWEI( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceClusterCullingShaderPropertiesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &, uint32_t const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxWorkGroupCount, maxWorkGroupSize, maxOutputClusterCount, indirectBufferOffsetAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & ) const = default; #else bool operator==( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWorkGroupCount == rhs.maxWorkGroupCount ) && ( maxWorkGroupSize == rhs.maxWorkGroupSize ) && ( maxOutputClusterCount == rhs.maxOutputClusterCount ) && ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment ); # endif } bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI; void * pNext = {}; ArrayWrapper1D maxWorkGroupCount = {}; ArrayWrapper1D maxWorkGroupSize = {}; uint32_t maxOutputClusterCount = {}; DeviceSize indirectBufferOffsetAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; }; #endif template <> struct CppType { using Type = PhysicalDeviceClusterCullingShaderPropertiesHUAWEI; }; // wrapper struct for struct 
VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI.html struct PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI { using NativeType = VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( Bool32 clusterShadingRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , clusterShadingRate{ clusterShadingRate_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & operator=( VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI & setClusterShadingRate( Bool32 clusterShadingRate_ ) & VULKAN_HPP_NOEXCEPT { clusterShadingRate = clusterShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI && setClusterShadingRate( Bool32 clusterShadingRate_ ) && VULKAN_HPP_NOEXCEPT { clusterShadingRate = clusterShadingRate_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, clusterShadingRate ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & ) const = default; #else bool operator==( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( clusterShadingRate == rhs.clusterShadingRate ); # endif } bool operator!=( PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; void * pNext = {}; Bool32 clusterShadingRate = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct 
CppType { using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; }; #endif template <> struct CppType { using Type = PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; }; // wrapper struct for struct VkPhysicalDeviceCoherentMemoryFeaturesAMD, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoherentMemoryFeaturesAMD.html struct PhysicalDeviceCoherentMemoryFeaturesAMD { using NativeType = VkPhysicalDeviceCoherentMemoryFeaturesAMD; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceCoherentMemory{ deviceCoherentMemory_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { 
pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( Bool32 deviceCoherentMemory_ ) & VULKAN_HPP_NOEXCEPT { deviceCoherentMemory = deviceCoherentMemory_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD && setDeviceCoherentMemory( Bool32 deviceCoherentMemory_ ) && VULKAN_HPP_NOEXCEPT { deviceCoherentMemory = deviceCoherentMemory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCoherentMemoryFeaturesAMD *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceCoherentMemory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const & ) const = default; #else bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceCoherentMemory == rhs.deviceCoherentMemory ); # endif } bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD; void * pNext = {}; Bool32 deviceCoherentMemory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; }; #endif template <> 
struct CppType { using Type = PhysicalDeviceCoherentMemoryFeaturesAMD; }; // wrapper struct for struct VkPhysicalDeviceColorWriteEnableFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceColorWriteEnableFeaturesEXT.html struct PhysicalDeviceColorWriteEnableFeaturesEXT { using NativeType = VkPhysicalDeviceColorWriteEnableFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorWriteEnable{ colorWriteEnable_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceColorWriteEnableFeaturesEXT( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceColorWriteEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceColorWriteEnableFeaturesEXT & operator=( VkPhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceColorWriteEnableFeaturesEXT & setColorWriteEnable( Bool32 colorWriteEnable_ ) & VULKAN_HPP_NOEXCEPT { colorWriteEnable = colorWriteEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceColorWriteEnableFeaturesEXT && setColorWriteEnable( Bool32 colorWriteEnable_ ) && VULKAN_HPP_NOEXCEPT { colorWriteEnable = colorWriteEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceColorWriteEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceColorWriteEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, colorWriteEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceColorWriteEnableFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( colorWriteEnable == rhs.colorWriteEnable ); # endif } bool operator!=( PhysicalDeviceColorWriteEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceColorWriteEnableFeaturesEXT; void * pNext = {}; Bool32 colorWriteEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; }; // wrapper struct 
for struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCommandBufferInheritanceFeaturesNV.html struct PhysicalDeviceCommandBufferInheritanceFeaturesNV { using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( Bool32 commandBufferInheritance_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , commandBufferInheritance{ commandBufferInheritance_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this 
); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setCommandBufferInheritance( Bool32 commandBufferInheritance_ ) & VULKAN_HPP_NOEXCEPT { commandBufferInheritance = commandBufferInheritance_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV && setCommandBufferInheritance( Bool32 commandBufferInheritance_ ) && VULKAN_HPP_NOEXCEPT { commandBufferInheritance = commandBufferInheritance_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, commandBufferInheritance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBufferInheritance == rhs.commandBufferInheritance ); # endif } bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; void * pNext = {}; Bool32 commandBufferInheritance = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION 
template <> struct CppType { using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV.html struct PhysicalDeviceComputeOccupancyPriorityFeaturesNV { using NativeType = VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeOccupancyPriorityFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeOccupancyPriorityFeaturesNV( Bool32 computeOccupancyPriority_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , computeOccupancyPriority{ computeOccupancyPriority_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeOccupancyPriorityFeaturesNV( PhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceComputeOccupancyPriorityFeaturesNV( VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceComputeOccupancyPriorityFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceComputeOccupancyPriorityFeaturesNV & operator=( PhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceComputeOccupancyPriorityFeaturesNV & operator=( VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeOccupancyPriorityFeaturesNV & setPNext( void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeOccupancyPriorityFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeOccupancyPriorityFeaturesNV & setComputeOccupancyPriority( Bool32 computeOccupancyPriority_ ) & VULKAN_HPP_NOEXCEPT { computeOccupancyPriority = computeOccupancyPriority_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeOccupancyPriorityFeaturesNV && setComputeOccupancyPriority( Bool32 computeOccupancyPriority_ ) && VULKAN_HPP_NOEXCEPT { computeOccupancyPriority = computeOccupancyPriority_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceComputeOccupancyPriorityFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, computeOccupancyPriority ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceComputeOccupancyPriorityFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeOccupancyPriority == rhs.computeOccupancyPriority ); # endif } bool operator!=( PhysicalDeviceComputeOccupancyPriorityFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceComputeOccupancyPriorityFeaturesNV; void * pNext = {}; Bool32 computeOccupancyPriority = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceComputeOccupancyPriorityFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceComputeOccupancyPriorityFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR.html struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR { using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( Bool32 computeDerivativeGroupQuads_ = {}, Bool32 computeDerivativeGroupLinear_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , computeDerivativeGroupQuads{ computeDerivativeGroupQuads_ } , computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupQuads( Bool32 computeDerivativeGroupQuads_ ) & VULKAN_HPP_NOEXCEPT { computeDerivativeGroupQuads = computeDerivativeGroupQuads_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR && setComputeDerivativeGroupQuads( Bool32 computeDerivativeGroupQuads_ ) && VULKAN_HPP_NOEXCEPT { computeDerivativeGroupQuads = computeDerivativeGroupQuads_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupLinear( Bool32 computeDerivativeGroupLinear_ ) & VULKAN_HPP_NOEXCEPT { computeDerivativeGroupLinear = computeDerivativeGroupLinear_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR && setComputeDerivativeGroupLinear( Bool32 computeDerivativeGroupLinear_ ) && VULKAN_HPP_NOEXCEPT { computeDerivativeGroupLinear = computeDerivativeGroupLinear_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const *() 
const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, computeDerivativeGroupQuads, computeDerivativeGroupLinear ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads ) && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear ); # endif } bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; void * pNext = {}; Bool32 computeDerivativeGroupQuads = {}; Bool32 computeDerivativeGroupLinear = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; }; using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; // wrapper struct for struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR.html struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR { using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; static const bool allowDuplicate = false; 
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( Bool32 meshAndTaskShaderDerivatives_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , meshAndTaskShaderDerivatives{ meshAndTaskShaderDerivatives_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceComputeShaderDerivativesPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, meshAndTaskShaderDerivatives ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives ); # endif } bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; void * pNext = {}; Bool32 meshAndTaskShaderDerivatives = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceConditionalRenderingFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html struct PhysicalDeviceConditionalRenderingFeaturesEXT { using NativeType = VkPhysicalDeviceConditionalRenderingFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( Bool32 conditionalRendering_ = {}, Bool32 inheritedConditionalRendering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , conditionalRendering{ conditionalRendering_ } , inheritedConditionalRendering{ inheritedConditionalRendering_ } { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( Bool32 conditionalRendering_ ) & VULKAN_HPP_NOEXCEPT { conditionalRendering = conditionalRendering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT && setConditionalRendering( Bool32 conditionalRendering_ ) && VULKAN_HPP_NOEXCEPT { conditionalRendering = conditionalRendering_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( Bool32 inheritedConditionalRendering_ ) & VULKAN_HPP_NOEXCEPT { inheritedConditionalRendering = inheritedConditionalRendering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT && setInheritedConditionalRendering( Bool32 inheritedConditionalRendering_ ) && VULKAN_HPP_NOEXCEPT { 
inheritedConditionalRendering = inheritedConditionalRendering_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceConditionalRenderingFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, conditionalRendering, inheritedConditionalRendering ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( conditionalRendering == rhs.conditionalRendering ) && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering ); # endif } bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT; void * pNext = {}; Bool32 conditionalRendering = {}; Bool32 inheritedConditionalRendering = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceConditionalRenderingFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html struct PhysicalDeviceConservativeRasterizationPropertiesEXT { using NativeType = VkPhysicalDeviceConservativeRasterizationPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, Bool32 primitiveUnderestimation_ = {}, Bool32 conservativePointAndLineRasterization_ = {}, Bool32 degenerateTrianglesRasterized_ = {}, Bool32 degenerateLinesRasterized_ = {}, Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, Bool32 conservativeRasterizationPostDepthCoverage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , primitiveOverestimationSize{ primitiveOverestimationSize_ } , maxExtraPrimitiveOverestimationSize{ maxExtraPrimitiveOverestimationSize_ } , extraPrimitiveOverestimationSizeGranularity{ extraPrimitiveOverestimationSizeGranularity_ } , primitiveUnderestimation{ primitiveUnderestimation_ } , conservativePointAndLineRasterization{ conservativePointAndLineRasterization_ } , degenerateTrianglesRasterized{ degenerateTrianglesRasterized_ } , degenerateLinesRasterized{ degenerateLinesRasterized_ } , fullyCoveredFragmentShaderInputVariable{ fullyCoveredFragmentShaderInputVariable_ } , conservativeRasterizationPostDepthCoverage{ conservativeRasterizationPostDepthCoverage_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, primitiveOverestimationSize, maxExtraPrimitiveOverestimationSize, extraPrimitiveOverestimationSizeGranularity, primitiveUnderestimation, conservativePointAndLineRasterization, degenerateTrianglesRasterized, degenerateLinesRasterized, fullyCoveredFragmentShaderInputVariable, conservativeRasterizationPostDepthCoverage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && 
( pNext == rhs.pNext ) && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize ) && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize ) && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity ) && ( primitiveUnderestimation == rhs.primitiveUnderestimation ) && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization ) && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized ) && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized ) && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable ) && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage ); # endif } bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT; void * pNext = {}; float primitiveOverestimationSize = {}; float maxExtraPrimitiveOverestimationSize = {}; float extraPrimitiveOverestimationSizeGranularity = {}; Bool32 primitiveUnderestimation = {}; Bool32 conservativePointAndLineRasterization = {}; Bool32 degenerateTrianglesRasterized = {}; Bool32 degenerateLinesRasterized = {}; Bool32 fullyCoveredFragmentShaderInputVariable = {}; Bool32 conservativeRasterizationPostDepthCoverage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2FeaturesNV.html struct PhysicalDeviceCooperativeMatrix2FeaturesNV { using NativeType = 
VkPhysicalDeviceCooperativeMatrix2FeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( Bool32 cooperativeMatrixWorkgroupScope_ = {}, Bool32 cooperativeMatrixFlexibleDimensions_ = {}, Bool32 cooperativeMatrixReductions_ = {}, Bool32 cooperativeMatrixConversions_ = {}, Bool32 cooperativeMatrixPerElementOperations_ = {}, Bool32 cooperativeMatrixTensorAddressing_ = {}, Bool32 cooperativeMatrixBlockLoads_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ } , cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ } , cooperativeMatrixReductions{ cooperativeMatrixReductions_ } , cooperativeMatrixConversions{ cooperativeMatrixConversions_ } , cooperativeMatrixPerElementOperations{ cooperativeMatrixPerElementOperations_ } , cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ } , cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixWorkgroupScope( Bool32 cooperativeMatrixWorkgroupScope_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixWorkgroupScope( Bool32 cooperativeMatrixWorkgroupScope_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixFlexibleDimensions( Bool32 cooperativeMatrixFlexibleDimensions_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixFlexibleDimensions( Bool32 cooperativeMatrixFlexibleDimensions_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixReductions( Bool32 cooperativeMatrixReductions_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixReductions = cooperativeMatrixReductions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixReductions( Bool32 cooperativeMatrixReductions_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixReductions = cooperativeMatrixReductions_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixConversions( Bool32 cooperativeMatrixConversions_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixConversions = cooperativeMatrixConversions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixConversions( Bool32 cooperativeMatrixConversions_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixConversions = cooperativeMatrixConversions_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixPerElementOperations( Bool32 cooperativeMatrixPerElementOperations_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixPerElementOperations( Bool32 cooperativeMatrixPerElementOperations_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixTensorAddressing( Bool32 cooperativeMatrixTensorAddressing_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixTensorAddressing( Bool32 cooperativeMatrixTensorAddressing_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setCooperativeMatrixBlockLoads( Bool32 cooperativeMatrixBlockLoads_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV && setCooperativeMatrixBlockLoads( Bool32 cooperativeMatrixBlockLoads_ ) && VULKAN_HPP_NOEXCEPT { 
cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrixWorkgroupScope, cooperativeMatrixFlexibleDimensions, cooperativeMatrixReductions, cooperativeMatrixConversions, cooperativeMatrixPerElementOperations, cooperativeMatrixTensorAddressing, cooperativeMatrixBlockLoads ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) && ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) && ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) && ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) && ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) && ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) && ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; void * pNext = {}; Bool32 cooperativeMatrixWorkgroupScope = {}; Bool32 cooperativeMatrixFlexibleDimensions = {}; Bool32 cooperativeMatrixReductions = {}; Bool32 cooperativeMatrixConversions = {}; Bool32 cooperativeMatrixPerElementOperations = {}; Bool32 cooperativeMatrixTensorAddressing = {}; Bool32 cooperativeMatrixBlockLoads = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrix2PropertiesNV.html struct PhysicalDeviceCooperativeMatrix2PropertiesNV { using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {}, uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {}, uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ } , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ } , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( 
PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrixWorkgroupScopeMaxWorkgroupSize, cooperativeMatrixFlexibleDimensionsMaxDimension, cooperativeMatrixWorkgroupScopeReservedSharedMemory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) && ( 
cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) && ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; void * pNext = {}; uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {}; uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {}; uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM.html struct PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM { using NativeType = VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( Bool32 cooperativeMatrixConversion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrixConversion{ cooperativeMatrixConversion_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & operator=( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & operator=( VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM & setCooperativeMatrixConversion( Bool32 cooperativeMatrixConversion_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixConversion = cooperativeMatrixConversion_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM && setCooperativeMatrixConversion( Bool32 cooperativeMatrixConversion_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixConversion = cooperativeMatrixConversion_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixConversionFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrixConversion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixConversion == rhs.cooperativeMatrixConversion ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; void * pNext = {}; Bool32 cooperativeMatrixConversion = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixConversionFeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesKHR.html struct PhysicalDeviceCooperativeMatrixFeaturesKHR { using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( Bool32 
cooperativeMatrix_ = {}, Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrix{ cooperativeMatrix_ } , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixFeaturesKHR( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixFeaturesKHR & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setCooperativeMatrix( Bool32 cooperativeMatrix_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR && setCooperativeMatrix( Bool32 cooperativeMatrix_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR & setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) & VULKAN_HPP_NOEXCEPT { 
cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesKHR && setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; void * pNext = {}; Bool32 cooperativeMatrix = {}; Bool32 cooperativeMatrixRobustBufferAccess = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct 
CppType { using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesNV.html struct PhysicalDeviceCooperativeMatrixFeaturesNV { using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( Bool32 cooperativeMatrix_ = {}, Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrix{ cooperativeMatrix_ } , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( Bool32 cooperativeMatrix_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV && setCooperativeMatrix( Bool32 cooperativeMatrix_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrix = cooperativeMatrix_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) & VULKAN_HPP_NOEXCEPT { cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV && setCooperativeMatrixRobustBufferAccess( Bool32 cooperativeMatrixRobustBufferAccess_ ) && VULKAN_HPP_NOEXCEPT { cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const & ) const = default; #else bool 
operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV; void * pNext = {}; Bool32 cooperativeMatrix = {}; Bool32 cooperativeMatrixRobustBufferAccess = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesKHR.html struct PhysicalDeviceCooperativeMatrixPropertiesKHR { using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixPropertiesKHR( 
VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixPropertiesKHR & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; void * pNext = {}; ShaderStageFlags cooperativeMatrixSupportedStages = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceCooperativeMatrixPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesNV.html struct PhysicalDeviceCooperativeMatrixPropertiesNV { using NativeType = VkPhysicalDeviceCooperativeMatrixPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeMatrixPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); # endif } bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; void * pNext = {}; ShaderStageFlags cooperativeMatrixSupportedStages = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceCooperativeVectorFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorFeaturesNV.html struct PhysicalDeviceCooperativeVectorFeaturesNV { using NativeType = VkPhysicalDeviceCooperativeVectorFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( Bool32 cooperativeVector_ = {}, Bool32 cooperativeVectorTraining_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeVector{ cooperativeVector_ } , cooperativeVectorTraining{ cooperativeVectorTraining_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorFeaturesNV( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeVectorFeaturesNV( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeVectorFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeVectorFeaturesNV & operator=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeVectorFeaturesNV & operator=( VkPhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVector( Bool32 cooperativeVector_ ) & VULKAN_HPP_NOEXCEPT { cooperativeVector = cooperativeVector_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV && setCooperativeVector( Bool32 cooperativeVector_ ) && VULKAN_HPP_NOEXCEPT { cooperativeVector = cooperativeVector_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV & setCooperativeVectorTraining( Bool32 cooperativeVectorTraining_ ) & 
VULKAN_HPP_NOEXCEPT { cooperativeVectorTraining = cooperativeVectorTraining_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeVectorFeaturesNV && setCooperativeVectorTraining( Bool32 cooperativeVectorTraining_ ) && VULKAN_HPP_NOEXCEPT { cooperativeVectorTraining = cooperativeVectorTraining_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCooperativeVectorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeVector, cooperativeVectorTraining ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeVectorFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVector == rhs.cooperativeVector ) && ( cooperativeVectorTraining == rhs.cooperativeVectorTraining ); # endif } bool operator!=( PhysicalDeviceCooperativeVectorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorFeaturesNV; void * pNext = {}; Bool32 cooperativeVector = {}; Bool32 cooperativeVectorTraining = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeVectorFeaturesNV; }; #endif template <> 
struct CppType { using Type = PhysicalDeviceCooperativeVectorFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCooperativeVectorPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCooperativeVectorPropertiesNV.html struct PhysicalDeviceCooperativeVectorPropertiesNV { using NativeType = VkPhysicalDeviceCooperativeVectorPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( ShaderStageFlags cooperativeVectorSupportedStages_ = {}, Bool32 cooperativeVectorTrainingFloat16Accumulation_ = {}, Bool32 cooperativeVectorTrainingFloat32Accumulation_ = {}, uint32_t maxCooperativeVectorComponents_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cooperativeVectorSupportedStages{ cooperativeVectorSupportedStages_ } , cooperativeVectorTrainingFloat16Accumulation{ cooperativeVectorTrainingFloat16Accumulation_ } , cooperativeVectorTrainingFloat32Accumulation{ cooperativeVectorTrainingFloat32Accumulation_ } , maxCooperativeVectorComponents{ maxCooperativeVectorComponents_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeVectorPropertiesNV( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCooperativeVectorPropertiesNV( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCooperativeVectorPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCooperativeVectorPropertiesNV & operator=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCooperativeVectorPropertiesNV & operator=( VkPhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCooperativeVectorPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCooperativeVectorPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cooperativeVectorSupportedStages, cooperativeVectorTrainingFloat16Accumulation, cooperativeVectorTrainingFloat32Accumulation, maxCooperativeVectorComponents ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeVectorPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeVectorSupportedStages == rhs.cooperativeVectorSupportedStages ) && ( cooperativeVectorTrainingFloat16Accumulation == rhs.cooperativeVectorTrainingFloat16Accumulation ) && ( cooperativeVectorTrainingFloat32Accumulation == rhs.cooperativeVectorTrainingFloat32Accumulation ) && ( maxCooperativeVectorComponents == rhs.maxCooperativeVectorComponents ); # endif } bool operator!=( PhysicalDeviceCooperativeVectorPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCooperativeVectorPropertiesNV; void * pNext = {}; ShaderStageFlags cooperativeVectorSupportedStages = {}; Bool32 cooperativeVectorTrainingFloat16Accumulation = {}; Bool32 
cooperativeVectorTrainingFloat32Accumulation = {}; uint32_t maxCooperativeVectorComponents = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCooperativeVectorPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCooperativeVectorPropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR.html struct PhysicalDeviceCopyMemoryIndirectFeaturesKHR { using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesKHR( Bool32 indirectMemoryCopy_ = {}, Bool32 indirectMemoryToImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indirectMemoryCopy{ indirectMemoryCopy_ } , indirectMemoryToImageCopy{ indirectMemoryToImageCopy_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesKHR( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCopyMemoryIndirectFeaturesKHR( VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCopyMemoryIndirectFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCopyMemoryIndirectFeaturesKHR & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCopyMemoryIndirectFeaturesKHR & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setIndirectMemoryCopy( Bool32 indirectMemoryCopy_ ) & VULKAN_HPP_NOEXCEPT { indirectMemoryCopy = indirectMemoryCopy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR && setIndirectMemoryCopy( Bool32 indirectMemoryCopy_ ) && VULKAN_HPP_NOEXCEPT { indirectMemoryCopy = indirectMemoryCopy_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR & setIndirectMemoryToImageCopy( Bool32 indirectMemoryToImageCopy_ ) & VULKAN_HPP_NOEXCEPT { indirectMemoryToImageCopy = indirectMemoryToImageCopy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesKHR && setIndirectMemoryToImageCopy( Bool32 indirectMemoryToImageCopy_ ) && VULKAN_HPP_NOEXCEPT { indirectMemoryToImageCopy = indirectMemoryToImageCopy_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, indirectMemoryCopy, indirectMemoryToImageCopy ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) 
auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectMemoryCopy == rhs.indirectMemoryCopy ) && ( indirectMemoryToImageCopy == rhs.indirectMemoryToImageCopy ); # endif } bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesKHR; void * pNext = {}; Bool32 indirectMemoryCopy = {}; Bool32 indirectMemoryToImageCopy = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesNV.html struct PhysicalDeviceCopyMemoryIndirectFeaturesNV { using NativeType = VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( Bool32 indirectCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indirectCopy{ indirectCopy_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCopyMemoryIndirectFeaturesNV( 
VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCopyMemoryIndirectFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCopyMemoryIndirectFeaturesNV & operator=( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV & setIndirectCopy( Bool32 indirectCopy_ ) & VULKAN_HPP_NOEXCEPT { indirectCopy = indirectCopy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCopyMemoryIndirectFeaturesNV && setIndirectCopy( Bool32 indirectCopy_ ) && VULKAN_HPP_NOEXCEPT { indirectCopy = indirectCopy_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, indirectCopy ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); # endif } bool operator!=( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; void * pNext = {}; Bool32 indirectCopy = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR.html struct PhysicalDeviceCopyMemoryIndirectPropertiesKHR { using NativeType = VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesKHR( QueueFlags supportedQueues_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , supportedQueues{ supportedQueues_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesKHR( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCopyMemoryIndirectPropertiesKHR( VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT 
: PhysicalDeviceCopyMemoryIndirectPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCopyMemoryIndirectPropertiesKHR & operator=( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCopyMemoryIndirectPropertiesKHR & operator=( VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCopyMemoryIndirectPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, supportedQueues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedQueues == rhs.supportedQueues ); # endif } bool operator!=( PhysicalDeviceCopyMemoryIndirectPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectPropertiesKHR; void * pNext = {}; QueueFlags supportedQueues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCopyMemoryIndirectPropertiesKHR; }; #endif template <> struct 
CppType { using Type = PhysicalDeviceCopyMemoryIndirectPropertiesKHR; }; using PhysicalDeviceCopyMemoryIndirectPropertiesNV = PhysicalDeviceCopyMemoryIndirectPropertiesKHR; // wrapper struct for struct VkPhysicalDeviceCornerSampledImageFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html struct PhysicalDeviceCornerSampledImageFeaturesNV { using NativeType = VkPhysicalDeviceCornerSampledImageFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( Bool32 cornerSampledImage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cornerSampledImage{ cornerSampledImage_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV && setPNext( void * 
pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( Bool32 cornerSampledImage_ ) & VULKAN_HPP_NOEXCEPT { cornerSampledImage = cornerSampledImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV && setCornerSampledImage( Bool32 cornerSampledImage_ ) && VULKAN_HPP_NOEXCEPT { cornerSampledImage = cornerSampledImage_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCornerSampledImageFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCornerSampledImageFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCornerSampledImageFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cornerSampledImage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); # endif } bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; void * pNext = {}; Bool32 cornerSampledImage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PhysicalDeviceCornerSampledImageFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCornerSampledImageFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCoverageReductionModeFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCoverageReductionModeFeaturesNV.html struct PhysicalDeviceCoverageReductionModeFeaturesNV { using NativeType = VkPhysicalDeviceCoverageReductionModeFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( Bool32 coverageReductionMode_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , coverageReductionMode{ coverageReductionMode_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceCoverageReductionModeFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( Bool32 coverageReductionMode_ ) & VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV && setCoverageReductionMode( Bool32 coverageReductionMode_ ) && VULKAN_HPP_NOEXCEPT { coverageReductionMode = coverageReductionMode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCoverageReductionModeFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, coverageReductionMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); # endif } bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; void * pNext = {}; 
Bool32 coverageReductionMode = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceCubicClampFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicClampFeaturesQCOM.html struct PhysicalDeviceCubicClampFeaturesQCOM { using NativeType = VkPhysicalDeviceCubicClampFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cubicRangeClamp{ cubicRangeClamp_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCubicClampFeaturesQCOM( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCubicClampFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCubicClampFeaturesQCOM & operator=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCubicClampFeaturesQCOM & operator=( VkPhysicalDeviceCubicClampFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM && setPNext( void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM & setCubicRangeClamp( Bool32 cubicRangeClamp_ ) & VULKAN_HPP_NOEXCEPT { cubicRangeClamp = cubicRangeClamp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicClampFeaturesQCOM && setCubicRangeClamp( Bool32 cubicRangeClamp_ ) && VULKAN_HPP_NOEXCEPT { cubicRangeClamp = cubicRangeClamp_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCubicClampFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCubicClampFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCubicClampFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cubicRangeClamp ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); # endif } bool operator!=( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; void * pNext = {}; Bool32 cubicRangeClamp = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCubicClampFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceCubicClampFeaturesQCOM; }; 
// wrapper struct for struct VkPhysicalDeviceCubicWeightsFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCubicWeightsFeaturesQCOM.html struct PhysicalDeviceCubicWeightsFeaturesQCOM { using NativeType = VkPhysicalDeviceCubicWeightsFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( Bool32 selectableCubicWeights_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , selectableCubicWeights{ selectableCubicWeights_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCubicWeightsFeaturesQCOM( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCubicWeightsFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCubicWeightsFeaturesQCOM & operator=( VkPhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM & setSelectableCubicWeights( Bool32 selectableCubicWeights_ ) & VULKAN_HPP_NOEXCEPT { 
selectableCubicWeights = selectableCubicWeights_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCubicWeightsFeaturesQCOM && setSelectableCubicWeights( Bool32 selectableCubicWeights_ ) && VULKAN_HPP_NOEXCEPT { selectableCubicWeights = selectableCubicWeights_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCubicWeightsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCubicWeightsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, selectableCubicWeights ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCubicWeightsFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( selectableCubicWeights == rhs.selectableCubicWeights ); # endif } bool operator!=( PhysicalDeviceCubicWeightsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCubicWeightsFeaturesQCOM; void * pNext = {}; Bool32 selectableCubicWeights = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCubicWeightsFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceCubicWeightsFeaturesQCOM; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct 
VkPhysicalDeviceCudaKernelLaunchFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchFeaturesNV.html struct PhysicalDeviceCudaKernelLaunchFeaturesNV { using NativeType = VkPhysicalDeviceCudaKernelLaunchFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( Bool32 cudaKernelLaunchFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCudaKernelLaunchFeaturesNV( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCudaKernelLaunchFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCudaKernelLaunchFeaturesNV & operator=( VkPhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV & setCudaKernelLaunchFeatures( Bool32 cudaKernelLaunchFeatures_ ) & 
VULKAN_HPP_NOEXCEPT { cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCudaKernelLaunchFeaturesNV && setCudaKernelLaunchFeatures( Bool32 cudaKernelLaunchFeatures_ ) && VULKAN_HPP_NOEXCEPT { cudaKernelLaunchFeatures = cudaKernelLaunchFeatures_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, cudaKernelLaunchFeatures ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default; # else bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures ); # endif } bool operator!=( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchFeaturesNV; void * pNext = {}; Bool32 cudaKernelLaunchFeatures = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV; }; # endif template <> struct CppType { using Type = PhysicalDeviceCudaKernelLaunchFeaturesNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ 
#if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCudaKernelLaunchPropertiesNV.html struct PhysicalDeviceCudaKernelLaunchPropertiesNV { using NativeType = VkPhysicalDeviceCudaKernelLaunchPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {}, uint32_t computeCapabilityMajor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , computeCapabilityMinor{ computeCapabilityMinor_ } , computeCapabilityMajor{ computeCapabilityMajor_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCudaKernelLaunchPropertiesNV( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCudaKernelLaunchPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCudaKernelLaunchPropertiesNV & operator=( VkPhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkPhysicalDeviceCudaKernelLaunchPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, computeCapabilityMinor, computeCapabilityMajor ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCudaKernelLaunchPropertiesNV const & ) const = default; # else bool operator==( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( computeCapabilityMinor == rhs.computeCapabilityMinor ) && ( computeCapabilityMajor == rhs.computeCapabilityMajor ); # endif } bool operator!=( PhysicalDeviceCudaKernelLaunchPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceCudaKernelLaunchPropertiesNV; void * pNext = {}; uint32_t computeCapabilityMinor = {}; uint32_t computeCapabilityMajor = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV; }; # endif template <> struct CppType { using Type = PhysicalDeviceCudaKernelLaunchPropertiesNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkPhysicalDeviceCustomBorderColorFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorFeaturesEXT.html struct PhysicalDeviceCustomBorderColorFeaturesEXT { using NativeType = VkPhysicalDeviceCustomBorderColorFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( Bool32 customBorderColors_ = {}, Bool32 customBorderColorWithoutFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , customBorderColors{ customBorderColors_ } , customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( Bool32 customBorderColors_ ) & VULKAN_HPP_NOEXCEPT { customBorderColors = customBorderColors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT && setCustomBorderColors( Bool32 customBorderColors_ ) && VULKAN_HPP_NOEXCEPT { customBorderColors = customBorderColors_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( Bool32 
customBorderColorWithoutFormat_ ) & VULKAN_HPP_NOEXCEPT { customBorderColorWithoutFormat = customBorderColorWithoutFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT && setCustomBorderColorWithoutFormat( Bool32 customBorderColorWithoutFormat_ ) && VULKAN_HPP_NOEXCEPT { customBorderColorWithoutFormat = customBorderColorWithoutFormat_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCustomBorderColorFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, customBorderColors, customBorderColorWithoutFormat ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customBorderColors == rhs.customBorderColors ) && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat ); # endif } bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT; void * pNext = {}; Bool32 customBorderColors = {}; Bool32 customBorderColorWithoutFormat = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION 
template <> struct CppType { using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceCustomBorderColorFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceCustomBorderColorPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomBorderColorPropertiesEXT.html struct PhysicalDeviceCustomBorderColorPropertiesEXT { using NativeType = VkPhysicalDeviceCustomBorderColorPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceCustomBorderColorPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxCustomBorderColorSamplers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers ); # endif } bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; void * pNext = {}; uint32_t maxCustomBorderColorSamplers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceCustomBorderColorPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceCustomResolveFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceCustomResolveFeaturesEXT.html struct PhysicalDeviceCustomResolveFeaturesEXT { using NativeType = VkPhysicalDeviceCustomResolveFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomResolveFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceCustomResolveFeaturesEXT( Bool32 customResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , customResolve{ customResolve_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomResolveFeaturesEXT( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceCustomResolveFeaturesEXT( VkPhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceCustomResolveFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceCustomResolveFeaturesEXT & operator=( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceCustomResolveFeaturesEXT & operator=( VkPhysicalDeviceCustomResolveFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT & setCustomResolve( Bool32 customResolve_ ) & VULKAN_HPP_NOEXCEPT { customResolve = customResolve_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomResolveFeaturesEXT && setCustomResolve( Bool32 customResolve_ ) && VULKAN_HPP_NOEXCEPT { customResolve = customResolve_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceCustomResolveFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCustomResolveFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceCustomResolveFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkPhysicalDeviceCustomResolveFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, customResolve ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCustomResolveFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( customResolve == rhs.customResolve ); # endif } bool operator!=( PhysicalDeviceCustomResolveFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceCustomResolveFeaturesEXT; void * pNext = {}; Bool32 customResolve = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceCustomResolveFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceCustomResolveFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDataGraphFeaturesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphFeaturesARM.html struct PhysicalDeviceDataGraphFeaturesARM { using NativeType = VkPhysicalDeviceDataGraphFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDataGraphFeaturesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphFeaturesARM( Bool32 dataGraph_ = {}, Bool32 dataGraphUpdateAfterBind_ = {}, Bool32 dataGraphSpecializationConstants_ = {}, Bool32 dataGraphDescriptorBuffer_ = {}, Bool32 dataGraphShaderModule_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dataGraph{ dataGraph_ } , dataGraphUpdateAfterBind{ dataGraphUpdateAfterBind_ } , dataGraphSpecializationConstants{ dataGraphSpecializationConstants_ } , dataGraphDescriptorBuffer{ dataGraphDescriptorBuffer_ } , dataGraphShaderModule{ dataGraphShaderModule_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphFeaturesARM( PhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDataGraphFeaturesARM( VkPhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDataGraphFeaturesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDataGraphFeaturesARM & operator=( PhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDataGraphFeaturesARM & operator=( VkPhysicalDeviceDataGraphFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraph( Bool32 dataGraph_ ) & VULKAN_HPP_NOEXCEPT { dataGraph = dataGraph_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setDataGraph( Bool32 dataGraph_ ) && VULKAN_HPP_NOEXCEPT { dataGraph = dataGraph_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphUpdateAfterBind( Bool32 dataGraphUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { dataGraphUpdateAfterBind = dataGraphUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setDataGraphUpdateAfterBind( Bool32 
dataGraphUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { dataGraphUpdateAfterBind = dataGraphUpdateAfterBind_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphSpecializationConstants( Bool32 dataGraphSpecializationConstants_ ) & VULKAN_HPP_NOEXCEPT { dataGraphSpecializationConstants = dataGraphSpecializationConstants_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setDataGraphSpecializationConstants( Bool32 dataGraphSpecializationConstants_ ) && VULKAN_HPP_NOEXCEPT { dataGraphSpecializationConstants = dataGraphSpecializationConstants_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphDescriptorBuffer( Bool32 dataGraphDescriptorBuffer_ ) & VULKAN_HPP_NOEXCEPT { dataGraphDescriptorBuffer = dataGraphDescriptorBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setDataGraphDescriptorBuffer( Bool32 dataGraphDescriptorBuffer_ ) && VULKAN_HPP_NOEXCEPT { dataGraphDescriptorBuffer = dataGraphDescriptorBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM & setDataGraphShaderModule( Bool32 dataGraphShaderModule_ ) & VULKAN_HPP_NOEXCEPT { dataGraphShaderModule = dataGraphShaderModule_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphFeaturesARM && setDataGraphShaderModule( Bool32 dataGraphShaderModule_ ) && VULKAN_HPP_NOEXCEPT { dataGraphShaderModule = dataGraphShaderModule_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDataGraphFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphFeaturesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphFeaturesARM *() VULKAN_HPP_NOEXCEPT { 
return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dataGraph, dataGraphUpdateAfterBind, dataGraphSpecializationConstants, dataGraphDescriptorBuffer, dataGraphShaderModule ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDataGraphFeaturesARM const & ) const = default; #else bool operator==( PhysicalDeviceDataGraphFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraph == rhs.dataGraph ) && ( dataGraphUpdateAfterBind == rhs.dataGraphUpdateAfterBind ) && ( dataGraphSpecializationConstants == rhs.dataGraphSpecializationConstants ) && ( dataGraphDescriptorBuffer == rhs.dataGraphDescriptorBuffer ) && ( dataGraphShaderModule == rhs.dataGraphShaderModule ); # endif } bool operator!=( PhysicalDeviceDataGraphFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDataGraphFeaturesARM; void * pNext = {}; Bool32 dataGraph = {}; Bool32 dataGraphUpdateAfterBind = {}; Bool32 dataGraphSpecializationConstants = {}; Bool32 dataGraphDescriptorBuffer = {}; Bool32 dataGraphShaderModule = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDataGraphFeaturesARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceDataGraphFeaturesARM; }; // wrapper struct for struct VkPhysicalDeviceDataGraphModelFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDataGraphModelFeaturesQCOM.html struct PhysicalDeviceDataGraphModelFeaturesQCOM { using NativeType = VkPhysicalDeviceDataGraphModelFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceDataGraphModelFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphModelFeaturesQCOM( Bool32 dataGraphModel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dataGraphModel{ dataGraphModel_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDataGraphModelFeaturesQCOM( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDataGraphModelFeaturesQCOM( VkPhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDataGraphModelFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDataGraphModelFeaturesQCOM & operator=( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDataGraphModelFeaturesQCOM & operator=( VkPhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM & setDataGraphModel( Bool32 dataGraphModel_ ) & VULKAN_HPP_NOEXCEPT { dataGraphModel = dataGraphModel_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDataGraphModelFeaturesQCOM && setDataGraphModel( Bool32 dataGraphModel_ ) && VULKAN_HPP_NOEXCEPT { dataGraphModel = dataGraphModel_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDataGraphModelFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceDataGraphModelFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphModelFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDataGraphModelFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dataGraphModel ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDataGraphModelFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dataGraphModel == rhs.dataGraphModel ); # endif } bool operator!=( PhysicalDeviceDataGraphModelFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDataGraphModelFeaturesQCOM; void * pNext = {}; Bool32 dataGraphModel = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDataGraphModelFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceDataGraphModelFeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.html struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { using NativeType = VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( Bool32 dedicatedAllocationImageAliasing_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( Bool32 dedicatedAllocationImageAliasing_ ) & VULKAN_HPP_NOEXCEPT { dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV && 
setDedicatedAllocationImageAliasing( Bool32 dedicatedAllocationImageAliasing_ ) && VULKAN_HPP_NOEXCEPT { dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dedicatedAllocationImageAliasing ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing ); # endif } bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; void * pNext = {}; Bool32 dedicatedAllocationImageAliasing = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX.html struct PhysicalDeviceDenseGeometryFormatFeaturesAMDX { using NativeType = VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDenseGeometryFormatFeaturesAMDX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDenseGeometryFormatFeaturesAMDX( Bool32 denseGeometryFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , denseGeometryFormat{ denseGeometryFormat_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDenseGeometryFormatFeaturesAMDX( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDenseGeometryFormatFeaturesAMDX( VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDenseGeometryFormatFeaturesAMDX( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDenseGeometryFormatFeaturesAMDX & operator=( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDenseGeometryFormatFeaturesAMDX & operator=( VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX && setPNext( void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX & setDenseGeometryFormat( Bool32 denseGeometryFormat_ ) & VULKAN_HPP_NOEXCEPT { denseGeometryFormat = denseGeometryFormat_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDenseGeometryFormatFeaturesAMDX && setDenseGeometryFormat( Bool32 denseGeometryFormat_ ) && VULKAN_HPP_NOEXCEPT { denseGeometryFormat = denseGeometryFormat_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDenseGeometryFormatFeaturesAMDX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, denseGeometryFormat ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & ) const = default; # else bool operator==( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denseGeometryFormat == rhs.denseGeometryFormat ); # endif } bool operator!=( PhysicalDeviceDenseGeometryFormatFeaturesAMDX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceDenseGeometryFormatFeaturesAMDX; void * pNext = {}; Bool32 denseGeometryFormat = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { 
using Type = PhysicalDeviceDenseGeometryFormatFeaturesAMDX; }; # endif template <> struct CppType { using Type = PhysicalDeviceDenseGeometryFormatFeaturesAMDX; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkPhysicalDeviceDepthBiasControlFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthBiasControlFeaturesEXT.html struct PhysicalDeviceDepthBiasControlFeaturesEXT { using NativeType = VkPhysicalDeviceDepthBiasControlFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( Bool32 depthBiasControl_ = {}, Bool32 leastRepresentableValueForceUnormRepresentation_ = {}, Bool32 floatRepresentation_ = {}, Bool32 depthBiasExact_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthBiasControl{ depthBiasControl_ } , leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ } , floatRepresentation{ floatRepresentation_ } , depthBiasExact{ depthBiasExact_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthBiasControlFeaturesEXT( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthBiasControlFeaturesEXT( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthBiasControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthBiasControlFeaturesEXT & operator=( VkPhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if 
!defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasControl( Bool32 depthBiasControl_ ) & VULKAN_HPP_NOEXCEPT { depthBiasControl = depthBiasControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT && setDepthBiasControl( Bool32 depthBiasControl_ ) && VULKAN_HPP_NOEXCEPT { depthBiasControl = depthBiasControl_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setLeastRepresentableValueForceUnormRepresentation( Bool32 leastRepresentableValueForceUnormRepresentation_ ) & VULKAN_HPP_NOEXCEPT { leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT && setLeastRepresentableValueForceUnormRepresentation( Bool32 leastRepresentableValueForceUnormRepresentation_ ) && VULKAN_HPP_NOEXCEPT { leastRepresentableValueForceUnormRepresentation = leastRepresentableValueForceUnormRepresentation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setFloatRepresentation( Bool32 floatRepresentation_ ) & VULKAN_HPP_NOEXCEPT { floatRepresentation = floatRepresentation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT && setFloatRepresentation( Bool32 floatRepresentation_ ) && VULKAN_HPP_NOEXCEPT { floatRepresentation = floatRepresentation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT & setDepthBiasExact( Bool32 depthBiasExact_ ) & 
VULKAN_HPP_NOEXCEPT { depthBiasExact = depthBiasExact_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthBiasControlFeaturesEXT && setDepthBiasExact( Bool32 depthBiasExact_ ) && VULKAN_HPP_NOEXCEPT { depthBiasExact = depthBiasExact_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthBiasControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthBiasControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthBiasControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthBiasControl, leastRepresentableValueForceUnormRepresentation, floatRepresentation, depthBiasExact ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthBiasControlFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthBiasControl == rhs.depthBiasControl ) && ( leastRepresentableValueForceUnormRepresentation == rhs.leastRepresentableValueForceUnormRepresentation ) && ( floatRepresentation == rhs.floatRepresentation ) && ( depthBiasExact == rhs.depthBiasExact ); # endif } bool operator!=( PhysicalDeviceDepthBiasControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthBiasControlFeaturesEXT; void * pNext = {}; Bool32 depthBiasControl = {}; Bool32 
leastRepresentableValueForceUnormRepresentation = {}; Bool32 floatRepresentation = {}; Bool32 depthBiasExact = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDepthClampControlFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampControlFeaturesEXT.html struct PhysicalDeviceDepthClampControlFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( Bool32 depthClampControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthClampControl{ depthClampControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void 
* pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setDepthClampControl( Bool32 depthClampControl_ ) & VULKAN_HPP_NOEXCEPT { depthClampControl = depthClampControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT && setDepthClampControl( Bool32 depthClampControl_ ) && VULKAN_HPP_NOEXCEPT { depthClampControl = depthClampControl_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthClampControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampControl == rhs.depthClampControl ); # endif } bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; void * 
pNext = {}; Bool32 depthClampControl = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthClampControlFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDepthClampControlFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDepthClampZeroOneFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClampZeroOneFeaturesKHR.html struct PhysicalDeviceDepthClampZeroOneFeaturesKHR { using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( Bool32 depthClampZeroOne_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthClampZeroOne{ depthClampZeroOne_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesKHR( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClampZeroOneFeaturesKHR( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClampZeroOneFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthClampZeroOneFeaturesKHR & operator=( VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR & setDepthClampZeroOne( Bool32 depthClampZeroOne_ ) & VULKAN_HPP_NOEXCEPT { depthClampZeroOne = depthClampZeroOne_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampZeroOneFeaturesKHR && setDepthClampZeroOne( Bool32 depthClampZeroOne_ ) && VULKAN_HPP_NOEXCEPT { depthClampZeroOne = depthClampZeroOne_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClampZeroOneFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthClampZeroOne ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampZeroOne == rhs.depthClampZeroOne ); # endif } bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesKHR; void * pNext = {}; Bool32 depthClampZeroOne = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceDepthClampZeroOneFeaturesKHR; }; using PhysicalDeviceDepthClampZeroOneFeaturesEXT = PhysicalDeviceDepthClampZeroOneFeaturesKHR; // wrapper struct for struct VkPhysicalDeviceDepthClipControlFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipControlFeaturesEXT.html struct PhysicalDeviceDepthClipControlFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClipControlFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( Bool32 depthClipControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthClipControl{ depthClipControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthClipControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setDepthClipControl( Bool32 depthClipControl_ ) & VULKAN_HPP_NOEXCEPT { depthClipControl = depthClipControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT && setDepthClipControl( Bool32 depthClipControl_ ) && VULKAN_HPP_NOEXCEPT { depthClipControl = depthClipControl_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthClipControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClipControlFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipControl == rhs.depthClipControl ); # endif } bool operator!=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT; void * pNext = {}; Bool32 depthClipControl = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthClipControlFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDepthClipControlFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDepthClipEnableFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthClipEnableFeaturesEXT.html struct PhysicalDeviceDepthClipEnableFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClipEnableFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( Bool32 depthClipEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , depthClipEnable{ depthClipEnable_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT && setPNext( void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( Bool32 depthClipEnable_ ) & VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT && setDepthClipEnable( Bool32 depthClipEnable_ ) && VULKAN_HPP_NOEXCEPT { depthClipEnable = depthClipEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthClipEnableFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthClipEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClipEnable == rhs.depthClipEnable ); # endif } bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT; void * pNext = {}; Bool32 depthClipEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthClipEnableFeaturesEXT; }; #endif template <> struct CppType { using Type 
= PhysicalDeviceDepthClipEnableFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDepthStencilResolveProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDepthStencilResolveProperties.html struct PhysicalDeviceDepthStencilResolveProperties { using NativeType = VkPhysicalDeviceDepthStencilResolveProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( ResolveModeFlags supportedDepthResolveModes_ = {}, ResolveModeFlags supportedStencilResolveModes_ = {}, Bool32 independentResolveNone_ = {}, Bool32 independentResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , supportedDepthResolveModes{ supportedDepthResolveModes_ } , supportedStencilResolveModes{ supportedStencilResolveModes_ } , independentResolveNone{ independentResolveNone_ } , independentResolve{ independentResolve_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDepthStencilResolveProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( 
this ); } operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDepthStencilResolveProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDepthStencilResolveProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; #else bool operator==( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes ) && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes ) && ( independentResolveNone == rhs.independentResolveNone ) && ( independentResolve == rhs.independentResolve ); # endif } bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; void * pNext = {}; ResolveModeFlags supportedDepthResolveModes = {}; ResolveModeFlags supportedStencilResolveModes = {}; Bool32 independentResolveNone = {}; Bool32 independentResolve = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDepthStencilResolveProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceDepthStencilResolveProperties; }; using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties; // 
wrapper struct for struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.html struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT { using NativeType = VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( 
this ); } operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDensityMapDescriptorSize == rhs.combinedImageSamplerDensityMapDescriptorSize ); # endif } bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; void * pNext = {}; size_t combinedImageSamplerDensityMapDescriptorSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceDescriptorBufferFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferFeaturesEXT.html struct PhysicalDeviceDescriptorBufferFeaturesEXT { using NativeType = VkPhysicalDeviceDescriptorBufferFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( Bool32 descriptorBuffer_ = {}, Bool32 descriptorBufferCaptureReplay_ = {}, Bool32 descriptorBufferImageLayoutIgnored_ = {}, Bool32 descriptorBufferPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorBuffer{ descriptorBuffer_ } , descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ } , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBuffer( Bool32 descriptorBuffer_ ) & VULKAN_HPP_NOEXCEPT { descriptorBuffer = descriptorBuffer_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT && setDescriptorBuffer( Bool32 descriptorBuffer_ ) && VULKAN_HPP_NOEXCEPT { descriptorBuffer = descriptorBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferCaptureReplay( Bool32 descriptorBufferCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT && setDescriptorBufferCaptureReplay( Bool32 descriptorBufferCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { descriptorBufferCaptureReplay = descriptorBufferCaptureReplay_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferImageLayoutIgnored( Bool32 descriptorBufferImageLayoutIgnored_ ) & VULKAN_HPP_NOEXCEPT { descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT && setDescriptorBufferImageLayoutIgnored( Bool32 descriptorBufferImageLayoutIgnored_ ) && VULKAN_HPP_NOEXCEPT { descriptorBufferImageLayoutIgnored = descriptorBufferImageLayoutIgnored_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setDescriptorBufferPushDescriptors( Bool32 descriptorBufferPushDescriptors_ ) & VULKAN_HPP_NOEXCEPT { descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT && setDescriptorBufferPushDescriptors( Bool32 descriptorBufferPushDescriptors_ ) && VULKAN_HPP_NOEXCEPT { descriptorBufferPushDescriptors = descriptorBufferPushDescriptors_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceDescriptorBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorBuffer, descriptorBufferCaptureReplay, descriptorBufferImageLayoutIgnored, descriptorBufferPushDescriptors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBuffer == rhs.descriptorBuffer ) && ( descriptorBufferCaptureReplay == rhs.descriptorBufferCaptureReplay ) && ( descriptorBufferImageLayoutIgnored == rhs.descriptorBufferImageLayoutIgnored ) && ( descriptorBufferPushDescriptors == rhs.descriptorBufferPushDescriptors ); # endif } bool operator!=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT; void * pNext = {}; Bool32 descriptorBuffer = {}; Bool32 descriptorBufferCaptureReplay = {}; Bool32 descriptorBufferImageLayoutIgnored = {}; Bool32 descriptorBufferPushDescriptors = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDescriptorBufferPropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferPropertiesEXT.html struct PhysicalDeviceDescriptorBufferPropertiesEXT { using NativeType = VkPhysicalDeviceDescriptorBufferPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( Bool32 combinedImageSamplerDescriptorSingleArray_ = {}, Bool32 bufferlessPushDescriptors_ = {}, Bool32 allowSamplerImageViewPostSubmitCreation_ = {}, DeviceSize descriptorBufferOffsetAlignment_ = {}, uint32_t maxDescriptorBufferBindings_ = {}, uint32_t maxResourceDescriptorBufferBindings_ = {}, uint32_t maxSamplerDescriptorBufferBindings_ = {}, uint32_t maxEmbeddedImmutableSamplerBindings_ = {}, uint32_t maxEmbeddedImmutableSamplers_ = {}, size_t bufferCaptureReplayDescriptorDataSize_ = {}, size_t imageCaptureReplayDescriptorDataSize_ = {}, size_t imageViewCaptureReplayDescriptorDataSize_ = {}, size_t samplerCaptureReplayDescriptorDataSize_ = {}, size_t accelerationStructureCaptureReplayDescriptorDataSize_ = {}, size_t samplerDescriptorSize_ = {}, size_t combinedImageSamplerDescriptorSize_ = {}, size_t sampledImageDescriptorSize_ = {}, size_t storageImageDescriptorSize_ = {}, size_t uniformTexelBufferDescriptorSize_ = {}, size_t robustUniformTexelBufferDescriptorSize_ = {}, size_t storageTexelBufferDescriptorSize_ = {}, size_t robustStorageTexelBufferDescriptorSize_ = {}, size_t uniformBufferDescriptorSize_ = {}, size_t robustUniformBufferDescriptorSize_ = {}, size_t storageBufferDescriptorSize_ = {}, size_t robustStorageBufferDescriptorSize_ = {}, size_t inputAttachmentDescriptorSize_ = {}, size_t accelerationStructureDescriptorSize_ = {}, DeviceSize maxSamplerDescriptorBufferRange_ = {}, DeviceSize 
maxResourceDescriptorBufferRange_ = {}, DeviceSize samplerDescriptorBufferAddressSpaceSize_ = {}, DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, DeviceSize descriptorBufferAddressSpaceSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , combinedImageSamplerDescriptorSingleArray{ combinedImageSamplerDescriptorSingleArray_ } , bufferlessPushDescriptors{ bufferlessPushDescriptors_ } , allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ } , descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ } , maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ } , maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ } , maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ } , maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ } , maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ } , bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ } , imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ } , imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ } , samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ } , accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ } , samplerDescriptorSize{ samplerDescriptorSize_ } , combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ } , sampledImageDescriptorSize{ sampledImageDescriptorSize_ } , storageImageDescriptorSize{ storageImageDescriptorSize_ } , uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ } , robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ } , storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ } , robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ } , uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ } 
, robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ } , storageBufferDescriptorSize{ storageBufferDescriptorSize_ } , robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ } , inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ } , accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ } , maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ } , maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ } , samplerDescriptorBufferAddressSpaceSize{ samplerDescriptorBufferAddressSpaceSize_ } , resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ } , descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferPropertiesEXT( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorBufferPropertiesEXT( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorBufferPropertiesEXT & operator=( VkPhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, combinedImageSamplerDescriptorSingleArray, bufferlessPushDescriptors, allowSamplerImageViewPostSubmitCreation, descriptorBufferOffsetAlignment, maxDescriptorBufferBindings, maxResourceDescriptorBufferBindings, maxSamplerDescriptorBufferBindings, maxEmbeddedImmutableSamplerBindings, maxEmbeddedImmutableSamplers, bufferCaptureReplayDescriptorDataSize, imageCaptureReplayDescriptorDataSize, imageViewCaptureReplayDescriptorDataSize, samplerCaptureReplayDescriptorDataSize, accelerationStructureCaptureReplayDescriptorDataSize, samplerDescriptorSize, combinedImageSamplerDescriptorSize, sampledImageDescriptorSize, storageImageDescriptorSize, uniformTexelBufferDescriptorSize, robustUniformTexelBufferDescriptorSize, storageTexelBufferDescriptorSize, robustStorageTexelBufferDescriptorSize, uniformBufferDescriptorSize, robustUniformBufferDescriptorSize, storageBufferDescriptorSize, robustStorageBufferDescriptorSize, inputAttachmentDescriptorSize, accelerationStructureDescriptorSize, maxSamplerDescriptorBufferRange, maxResourceDescriptorBufferRange, samplerDescriptorBufferAddressSpaceSize, resourceDescriptorBufferAddressSpaceSize, descriptorBufferAddressSpaceSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( combinedImageSamplerDescriptorSingleArray == rhs.combinedImageSamplerDescriptorSingleArray ) && ( bufferlessPushDescriptors == rhs.bufferlessPushDescriptors ) && ( allowSamplerImageViewPostSubmitCreation == rhs.allowSamplerImageViewPostSubmitCreation ) && ( descriptorBufferOffsetAlignment == 
rhs.descriptorBufferOffsetAlignment ) && ( maxDescriptorBufferBindings == rhs.maxDescriptorBufferBindings ) && ( maxResourceDescriptorBufferBindings == rhs.maxResourceDescriptorBufferBindings ) && ( maxSamplerDescriptorBufferBindings == rhs.maxSamplerDescriptorBufferBindings ) && ( maxEmbeddedImmutableSamplerBindings == rhs.maxEmbeddedImmutableSamplerBindings ) && ( maxEmbeddedImmutableSamplers == rhs.maxEmbeddedImmutableSamplers ) && ( bufferCaptureReplayDescriptorDataSize == rhs.bufferCaptureReplayDescriptorDataSize ) && ( imageCaptureReplayDescriptorDataSize == rhs.imageCaptureReplayDescriptorDataSize ) && ( imageViewCaptureReplayDescriptorDataSize == rhs.imageViewCaptureReplayDescriptorDataSize ) && ( samplerCaptureReplayDescriptorDataSize == rhs.samplerCaptureReplayDescriptorDataSize ) && ( accelerationStructureCaptureReplayDescriptorDataSize == rhs.accelerationStructureCaptureReplayDescriptorDataSize ) && ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( combinedImageSamplerDescriptorSize == rhs.combinedImageSamplerDescriptorSize ) && ( sampledImageDescriptorSize == rhs.sampledImageDescriptorSize ) && ( storageImageDescriptorSize == rhs.storageImageDescriptorSize ) && ( uniformTexelBufferDescriptorSize == rhs.uniformTexelBufferDescriptorSize ) && ( robustUniformTexelBufferDescriptorSize == rhs.robustUniformTexelBufferDescriptorSize ) && ( storageTexelBufferDescriptorSize == rhs.storageTexelBufferDescriptorSize ) && ( robustStorageTexelBufferDescriptorSize == rhs.robustStorageTexelBufferDescriptorSize ) && ( uniformBufferDescriptorSize == rhs.uniformBufferDescriptorSize ) && ( robustUniformBufferDescriptorSize == rhs.robustUniformBufferDescriptorSize ) && ( storageBufferDescriptorSize == rhs.storageBufferDescriptorSize ) && ( robustStorageBufferDescriptorSize == rhs.robustStorageBufferDescriptorSize ) && ( inputAttachmentDescriptorSize == rhs.inputAttachmentDescriptorSize ) && ( accelerationStructureDescriptorSize == 
rhs.accelerationStructureDescriptorSize ) && ( maxSamplerDescriptorBufferRange == rhs.maxSamplerDescriptorBufferRange ) && ( maxResourceDescriptorBufferRange == rhs.maxResourceDescriptorBufferRange ) && ( samplerDescriptorBufferAddressSpaceSize == rhs.samplerDescriptorBufferAddressSpaceSize ) && ( resourceDescriptorBufferAddressSpaceSize == rhs.resourceDescriptorBufferAddressSpaceSize ) && ( descriptorBufferAddressSpaceSize == rhs.descriptorBufferAddressSpaceSize ); # endif } bool operator!=( PhysicalDeviceDescriptorBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT; void * pNext = {}; Bool32 combinedImageSamplerDescriptorSingleArray = {}; Bool32 bufferlessPushDescriptors = {}; Bool32 allowSamplerImageViewPostSubmitCreation = {}; DeviceSize descriptorBufferOffsetAlignment = {}; uint32_t maxDescriptorBufferBindings = {}; uint32_t maxResourceDescriptorBufferBindings = {}; uint32_t maxSamplerDescriptorBufferBindings = {}; uint32_t maxEmbeddedImmutableSamplerBindings = {}; uint32_t maxEmbeddedImmutableSamplers = {}; size_t bufferCaptureReplayDescriptorDataSize = {}; size_t imageCaptureReplayDescriptorDataSize = {}; size_t imageViewCaptureReplayDescriptorDataSize = {}; size_t samplerCaptureReplayDescriptorDataSize = {}; size_t accelerationStructureCaptureReplayDescriptorDataSize = {}; size_t samplerDescriptorSize = {}; size_t combinedImageSamplerDescriptorSize = {}; size_t sampledImageDescriptorSize = {}; size_t storageImageDescriptorSize = {}; size_t uniformTexelBufferDescriptorSize = {}; size_t robustUniformTexelBufferDescriptorSize = {}; size_t storageTexelBufferDescriptorSize = {}; size_t robustStorageTexelBufferDescriptorSize = {}; size_t uniformBufferDescriptorSize = {}; size_t robustUniformBufferDescriptorSize = {}; size_t storageBufferDescriptorSize = {}; size_t robustStorageBufferDescriptorSize = {}; size_t 
inputAttachmentDescriptorSize = {}; size_t accelerationStructureDescriptorSize = {}; DeviceSize maxSamplerDescriptorBufferRange = {}; DeviceSize maxResourceDescriptorBufferRange = {}; DeviceSize samplerDescriptorBufferAddressSpaceSize = {}; DeviceSize resourceDescriptorBufferAddressSpaceSize = {}; DeviceSize descriptorBufferAddressSpaceSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorFeaturesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorFeaturesARM.html struct PhysicalDeviceDescriptorBufferTensorFeaturesARM { using NativeType = VkPhysicalDeviceDescriptorBufferTensorFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorFeaturesARM( Bool32 descriptorBufferTensorDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorBufferTensorDescriptors{ descriptorBufferTensorDescriptors_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorFeaturesARM( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorBufferTensorFeaturesARM( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferTensorFeaturesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceDescriptorBufferTensorFeaturesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM & setDescriptorBufferTensorDescriptors( Bool32 descriptorBufferTensorDescriptors_ ) & VULKAN_HPP_NOEXCEPT { descriptorBufferTensorDescriptors = descriptorBufferTensorDescriptors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorFeaturesARM && setDescriptorBufferTensorDescriptors( Bool32 descriptorBufferTensorDescriptors_ ) && VULKAN_HPP_NOEXCEPT { descriptorBufferTensorDescriptors = descriptorBufferTensorDescriptors_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorFeaturesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorBufferTensorDescriptors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & ) const = 
default; #else bool operator==( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorBufferTensorDescriptors == rhs.descriptorBufferTensorDescriptors ); # endif } bool operator!=( PhysicalDeviceDescriptorBufferTensorFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferTensorFeaturesARM; void * pNext = {}; Bool32 descriptorBufferTensorDescriptors = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferTensorFeaturesARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferTensorFeaturesARM; }; // wrapper struct for struct VkPhysicalDeviceDescriptorBufferTensorPropertiesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorBufferTensorPropertiesARM.html struct PhysicalDeviceDescriptorBufferTensorPropertiesARM { using NativeType = VkPhysicalDeviceDescriptorBufferTensorPropertiesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorPropertiesARM( size_t tensorCaptureReplayDescriptorDataSize_ = {}, size_t tensorViewCaptureReplayDescriptorDataSize_ = {}, size_t tensorDescriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , tensorCaptureReplayDescriptorDataSize{ tensorCaptureReplayDescriptorDataSize_ } , tensorViewCaptureReplayDescriptorDataSize{ tensorViewCaptureReplayDescriptorDataSize_ } , tensorDescriptorSize{ tensorDescriptorSize_ } { } 
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferTensorPropertiesARM( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorBufferTensorPropertiesARM( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferTensorPropertiesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorBufferTensorPropertiesARM & operator=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorBufferTensorPropertiesARM & operator=( VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorCaptureReplayDescriptorDataSize( size_t tensorCaptureReplayDescriptorDataSize_ ) & VULKAN_HPP_NOEXCEPT { tensorCaptureReplayDescriptorDataSize = tensorCaptureReplayDescriptorDataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM && setTensorCaptureReplayDescriptorDataSize( size_t tensorCaptureReplayDescriptorDataSize_ ) && VULKAN_HPP_NOEXCEPT { tensorCaptureReplayDescriptorDataSize = tensorCaptureReplayDescriptorDataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorViewCaptureReplayDescriptorDataSize( size_t tensorViewCaptureReplayDescriptorDataSize_ ) & VULKAN_HPP_NOEXCEPT { 
tensorViewCaptureReplayDescriptorDataSize = tensorViewCaptureReplayDescriptorDataSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM && setTensorViewCaptureReplayDescriptorDataSize( size_t tensorViewCaptureReplayDescriptorDataSize_ ) && VULKAN_HPP_NOEXCEPT { tensorViewCaptureReplayDescriptorDataSize = tensorViewCaptureReplayDescriptorDataSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM & setTensorDescriptorSize( size_t tensorDescriptorSize_ ) & VULKAN_HPP_NOEXCEPT { tensorDescriptorSize = tensorDescriptorSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferTensorPropertiesARM && setTensorDescriptorSize( size_t tensorDescriptorSize_ ) && VULKAN_HPP_NOEXCEPT { tensorDescriptorSize = tensorDescriptorSize_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorBufferTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, tensorCaptureReplayDescriptorDataSize, tensorViewCaptureReplayDescriptorDataSize, tensorDescriptorSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorCaptureReplayDescriptorDataSize == rhs.tensorCaptureReplayDescriptorDataSize ) && ( tensorViewCaptureReplayDescriptorDataSize == rhs.tensorViewCaptureReplayDescriptorDataSize ) && ( tensorDescriptorSize == rhs.tensorDescriptorSize ); # endif } bool operator!=( PhysicalDeviceDescriptorBufferTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferTensorPropertiesARM; void * pNext = {}; size_t tensorCaptureReplayDescriptorDataSize = {}; size_t tensorViewCaptureReplayDescriptorDataSize = {}; size_t tensorDescriptorSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorBufferTensorPropertiesARM; }; // wrapper struct for struct VkPhysicalDeviceDescriptorHeapFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapFeaturesEXT.html struct PhysicalDeviceDescriptorHeapFeaturesEXT { using NativeType = VkPhysicalDeviceDescriptorHeapFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapFeaturesEXT( Bool32 descriptorHeap_ = {}, Bool32 descriptorHeapCaptureReplay_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorHeap{ descriptorHeap_ } , descriptorHeapCaptureReplay{ descriptorHeapCaptureReplay_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapFeaturesEXT( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorHeapFeaturesEXT( 
VkPhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorHeapFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorHeapFeaturesEXT & operator=( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorHeapFeaturesEXT & operator=( VkPhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setDescriptorHeap( Bool32 descriptorHeap_ ) & VULKAN_HPP_NOEXCEPT { descriptorHeap = descriptorHeap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setDescriptorHeap( Bool32 descriptorHeap_ ) && VULKAN_HPP_NOEXCEPT { descriptorHeap = descriptorHeap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT & setDescriptorHeapCaptureReplay( Bool32 descriptorHeapCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { descriptorHeapCaptureReplay = descriptorHeapCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorHeapFeaturesEXT && setDescriptorHeapCaptureReplay( Bool32 descriptorHeapCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { descriptorHeapCaptureReplay = descriptorHeapCaptureReplay_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorHeapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorHeap, descriptorHeapCaptureReplay ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorHeapFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorHeap == rhs.descriptorHeap ) && ( descriptorHeapCaptureReplay == rhs.descriptorHeapCaptureReplay ); # endif } bool operator!=( PhysicalDeviceDescriptorHeapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapFeaturesEXT; void * pNext = {}; Bool32 descriptorHeap = {}; Bool32 descriptorHeapCaptureReplay = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorHeapFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorHeapFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDescriptorHeapPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapPropertiesEXT.html struct PhysicalDeviceDescriptorHeapPropertiesEXT { using NativeType = VkPhysicalDeviceDescriptorHeapPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapPropertiesEXT( DeviceSize samplerHeapAlignment_ = {}, DeviceSize resourceHeapAlignment_ = {}, DeviceSize maxSamplerHeapSize_ = {}, DeviceSize maxResourceHeapSize_ = {}, DeviceSize minSamplerHeapReservedRange_ = {}, DeviceSize minSamplerHeapReservedRangeWithEmbedded_ = {}, DeviceSize minResourceHeapReservedRange_ = {}, DeviceSize samplerDescriptorSize_ = {}, DeviceSize imageDescriptorSize_ = {}, DeviceSize bufferDescriptorSize_ = {}, DeviceSize samplerDescriptorAlignment_ = {}, DeviceSize imageDescriptorAlignment_ = {}, DeviceSize bufferDescriptorAlignment_ = {}, DeviceSize maxPushDataSize_ = {}, size_t imageCaptureReplayOpaqueDataSize_ = {}, uint32_t maxDescriptorHeapEmbeddedSamplers_ = {}, uint32_t samplerYcbcrConversionCount_ = {}, Bool32 sparseDescriptorHeaps_ = {}, Bool32 protectedDescriptorHeaps_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , samplerHeapAlignment{ samplerHeapAlignment_ } , resourceHeapAlignment{ resourceHeapAlignment_ } , maxSamplerHeapSize{ maxSamplerHeapSize_ } , maxResourceHeapSize{ maxResourceHeapSize_ } , minSamplerHeapReservedRange{ minSamplerHeapReservedRange_ } , minSamplerHeapReservedRangeWithEmbedded{ minSamplerHeapReservedRangeWithEmbedded_ } , minResourceHeapReservedRange{ minResourceHeapReservedRange_ } , samplerDescriptorSize{ samplerDescriptorSize_ } , imageDescriptorSize{ imageDescriptorSize_ } , bufferDescriptorSize{ bufferDescriptorSize_ } , samplerDescriptorAlignment{ samplerDescriptorAlignment_ } , imageDescriptorAlignment{ imageDescriptorAlignment_ } , bufferDescriptorAlignment{ bufferDescriptorAlignment_ } , maxPushDataSize{ maxPushDataSize_ } , imageCaptureReplayOpaqueDataSize{ imageCaptureReplayOpaqueDataSize_ } , maxDescriptorHeapEmbeddedSamplers{ maxDescriptorHeapEmbeddedSamplers_ } , samplerYcbcrConversionCount{ samplerYcbcrConversionCount_ } , sparseDescriptorHeaps{ 
sparseDescriptorHeaps_ } , protectedDescriptorHeaps{ protectedDescriptorHeaps_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapPropertiesEXT( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorHeapPropertiesEXT( VkPhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorHeapPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorHeapPropertiesEXT & operator=( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorHeapPropertiesEXT & operator=( VkPhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDescriptorHeapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorHeapPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, samplerHeapAlignment, resourceHeapAlignment, maxSamplerHeapSize, maxResourceHeapSize, minSamplerHeapReservedRange, minSamplerHeapReservedRangeWithEmbedded, minResourceHeapReservedRange, samplerDescriptorSize, imageDescriptorSize, bufferDescriptorSize, samplerDescriptorAlignment, imageDescriptorAlignment, bufferDescriptorAlignment, maxPushDataSize, imageCaptureReplayOpaqueDataSize, maxDescriptorHeapEmbeddedSamplers, samplerYcbcrConversionCount, sparseDescriptorHeaps, protectedDescriptorHeaps ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
PhysicalDeviceDescriptorHeapPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( samplerHeapAlignment == rhs.samplerHeapAlignment ) && ( resourceHeapAlignment == rhs.resourceHeapAlignment ) && ( maxSamplerHeapSize == rhs.maxSamplerHeapSize ) && ( maxResourceHeapSize == rhs.maxResourceHeapSize ) && ( minSamplerHeapReservedRange == rhs.minSamplerHeapReservedRange ) && ( minSamplerHeapReservedRangeWithEmbedded == rhs.minSamplerHeapReservedRangeWithEmbedded ) && ( minResourceHeapReservedRange == rhs.minResourceHeapReservedRange ) && ( samplerDescriptorSize == rhs.samplerDescriptorSize ) && ( imageDescriptorSize == rhs.imageDescriptorSize ) && ( bufferDescriptorSize == rhs.bufferDescriptorSize ) && ( samplerDescriptorAlignment == rhs.samplerDescriptorAlignment ) && ( imageDescriptorAlignment == rhs.imageDescriptorAlignment ) && ( bufferDescriptorAlignment == rhs.bufferDescriptorAlignment ) && ( maxPushDataSize == rhs.maxPushDataSize ) && ( imageCaptureReplayOpaqueDataSize == rhs.imageCaptureReplayOpaqueDataSize ) && ( maxDescriptorHeapEmbeddedSamplers == rhs.maxDescriptorHeapEmbeddedSamplers ) && ( samplerYcbcrConversionCount == rhs.samplerYcbcrConversionCount ) && ( sparseDescriptorHeaps == rhs.sparseDescriptorHeaps ) && ( protectedDescriptorHeaps == rhs.protectedDescriptorHeaps ); # endif } bool operator!=( PhysicalDeviceDescriptorHeapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapPropertiesEXT; void * pNext = {}; DeviceSize samplerHeapAlignment = {}; DeviceSize resourceHeapAlignment = {}; DeviceSize maxSamplerHeapSize = {}; DeviceSize maxResourceHeapSize = {}; DeviceSize minSamplerHeapReservedRange = {}; 
// Remaining public data members of PhysicalDeviceDescriptorHeapPropertiesEXT; layout mirrors the
// C struct VkPhysicalDeviceDescriptorHeapPropertiesEXT so the reinterpret_cast-based conversions work.
DeviceSize minSamplerHeapReservedRangeWithEmbedded = {};
DeviceSize minResourceHeapReservedRange = {};
DeviceSize samplerDescriptorSize = {};
DeviceSize imageDescriptorSize = {};
DeviceSize bufferDescriptorSize = {};
DeviceSize samplerDescriptorAlignment = {};
DeviceSize imageDescriptorAlignment = {};
DeviceSize bufferDescriptorAlignment = {};
DeviceSize maxPushDataSize = {};
size_t imageCaptureReplayOpaqueDataSize = {};
uint32_t maxDescriptorHeapEmbeddedSamplers = {};
uint32_t samplerYcbcrConversionCount = {};
Bool32 sparseDescriptorHeaps = {};
Bool32 protectedDescriptorHeaps = {};
};

// Trait specializations mapping the StructureType enumerant back to the C++ wrapper type.
// NOTE(review): the template-argument lists of these CppType specializations (and of the
// reinterpret_cast / std::tuple occurrences below) appear to have been stripped when this file was
// extracted — the generator normally emits e.g. CppType<StructureType, StructureType::e...>.
// Restore them from the original generator output before compiling; as written this will not parse.
#if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorHeapPropertiesEXT;
};
#endif

template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorHeapPropertiesEXT;
};

// wrapper struct for struct VkPhysicalDeviceDescriptorHeapTensorPropertiesARM, see
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorHeapTensorPropertiesARM.html
// Read-only properties struct: sizes/alignment of tensor descriptors for VK_ARM descriptor-heap tensors.
struct PhysicalDeviceDescriptorHeapTensorPropertiesARM
{
  using NativeType = VkPhysicalDeviceDescriptorHeapTensorPropertiesARM;

  // Properties structs may appear at most once in a pNext chain.
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  // Value constructor; sType is fixed by the member initializer, only pNext and payload are settable.
  VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapTensorPropertiesARM( DeviceSize tensorDescriptorSize_ = {},
                                                                        DeviceSize tensorDescriptorAlignment_ = {},
                                                                        size_t tensorCaptureReplayOpaqueDataSize_ = {},
                                                                        void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
    : pNext{ pNext_ }
    , tensorDescriptorSize{ tensorDescriptorSize_ }
    , tensorDescriptorAlignment{ tensorDescriptorAlignment_ }
    , tensorCaptureReplayOpaqueDataSize{ tensorCaptureReplayOpaqueDataSize_ }
  {
  }

  VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorHeapTensorPropertiesARM( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs )
  VULKAN_HPP_NOEXCEPT = default;

  // Construct from the C struct; delegates via reinterpret_cast (wrapper and C layout are identical).
  // NOTE(review): cast target type stripped by extraction — presumably
  // *reinterpret_cast<PhysicalDeviceDescriptorHeapTensorPropertiesARM const *>( &rhs ); confirm against generator output.
  PhysicalDeviceDescriptorHeapTensorPropertiesARM( VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
    : PhysicalDeviceDescriptorHeapTensorPropertiesARM( *reinterpret_cast( &rhs ) )
  {
  }

  PhysicalDeviceDescriptorHeapTensorPropertiesARM & operator=( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

  // Assign from the C struct (same stripped-cast caveat as above).
  PhysicalDeviceDescriptorHeapTensorPropertiesARM & operator=( VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast( &rhs );
    return *this;
  }

  // Implicit conversions to the C struct (by reference and by pointer), so the wrapper can be
  // passed directly to the C API. All four rely on identical layout of wrapper and C struct.
  operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const &() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM const *() const VULKAN_HPP_NOEXCEPT
  {
    return reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorHeapTensorPropertiesARM *() VULKAN_HPP_NOEXCEPT
  {
    return reinterpret_cast( this );
  }

#if defined( VULKAN_HPP_USE_REFLECT )
  // Expose all members as a tuple of references, used by the reflection-based comparison below.
  // NOTE(review): std::tuple's template-argument list was stripped by extraction.
  std::tuple reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie( sType, pNext, tensorDescriptorSize, tensorDescriptorAlignment, tensorCaptureReplayOpaqueDataSize );
  }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
  auto operator<=>( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & ) const = default;
#else
  // Memberwise equality; uses reflect() when available, otherwise an explicit member chain.
  bool operator==( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
#  if defined( VULKAN_HPP_USE_REFLECT )
    return this->reflect() == rhs.reflect();
#  else
    return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( tensorDescriptorSize == rhs.tensorDescriptorSize ) &&
           ( tensorDescriptorAlignment == rhs.tensorDescriptorAlignment ) &&
           ( tensorCaptureReplayOpaqueDataSize == rhs.tensorCaptureReplayOpaqueDataSize );
#  endif
  }

  bool operator!=( PhysicalDeviceDescriptorHeapTensorPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif

public:
  StructureType sType = StructureType::ePhysicalDeviceDescriptorHeapTensorPropertiesARM;
  void * pNext = {};
  DeviceSize tensorDescriptorSize = {};
  DeviceSize tensorDescriptorAlignment = {};
  size_t tensorCaptureReplayOpaqueDataSize = {};
};

// StructureType -> wrapper-type mapping (template arguments stripped by extraction, see note above).
#if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorHeapTensorPropertiesARM;
};
#endif

template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorHeapTensorPropertiesARM;
};

// wrapper struct for struct VkPhysicalDeviceDescriptorIndexingFeatures, see
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html
// Feature struct (core Vulkan 1.2, promoted from VK_EXT_descriptor_indexing): toggles for
// non-uniform indexing, update-after-bind bindings and runtime-sized descriptor arrays.
struct PhysicalDeviceDescriptorIndexingFeatures
{
  using NativeType = VkPhysicalDeviceDescriptorIndexingFeatures;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  // Value constructor: one Bool32 per feature bit, in C-struct member order; pNext comes last.
  VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
                                                                 Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
                                                                 Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
                                                                 Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
                                                                 Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
                                                                 Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
                                                                 Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
                                                                 Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
                                                                 Bool32
descriptorBindingStorageBufferUpdateAfterBind_ = {},
                                                                 Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
                                                                 Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
                                                                 Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
                                                                 Bool32 descriptorBindingPartiallyBound_ = {},
                                                                 Bool32 descriptorBindingVariableDescriptorCount_ = {},
                                                                 Bool32 runtimeDescriptorArray_ = {},
                                                                 void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
    : pNext{ pNext_ }
    , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ }
    , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ }
    , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ }
    , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ }
    , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ }
    , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ }
    , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ }
    , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ }
    , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ }
    , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ }
    , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ }
    , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ }
    , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ }
    , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ }
    , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ }
    , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ }
    , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ }
    , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ }
    , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ }
    , runtimeDescriptorArray{ runtimeDescriptorArray_ }
  {
  }

  VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;

  // Construct from the C struct by delegating through a layout-identity cast.
  // NOTE(review): the reinterpret_cast target types in this struct were stripped by extraction
  // (generator emits e.g. reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>);
  // restore from the original generator output before compiling.
  PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast( &rhs ) )
  {
  }

  PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

  PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast( &rhs );
    return *this;
  }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
  // Chainable setters. Each member has an lvalue overload (returns *this by reference) and an
  // rvalue overload (returns the moved temporary) so builder-style chains work on temporaries too.
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderInputAttachmentArrayDynamicIndexing( Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderUniformTexelBufferArrayDynamicIndexing( Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderStorageTexelBufferArrayDynamicIndexing( Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderUniformBufferArrayNonUniformIndexing( Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderSampledImageArrayNonUniformIndexing( Bool32 shaderSampledImageArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderStorageBufferArrayNonUniformIndexing( Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderStorageImageArrayNonUniformIndexing( Bool32 shaderStorageImageArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderInputAttachmentArrayNonUniformIndexing( Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderUniformTexelBufferArrayNonUniformIndexing( Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) & VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setShaderStorageTexelBufferArrayNonUniformIndexing( Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) && VULKAN_HPP_NOEXCEPT { shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingUniformBufferUpdateAfterBind( Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingSampledImageUpdateAfterBind( Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingStorageImageUpdateAfterBind( Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingStorageBufferUpdateAfterBind( Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingUniformTexelBufferUpdateAfterBind( Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingStorageTexelBufferUpdateAfterBind( Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingUpdateUnusedWhilePending( Bool32 descriptorBindingUpdateUnusedWhilePending_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingPartiallyBound( Bool32 descriptorBindingPartiallyBound_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setDescriptorBindingVariableDescriptorCount( Bool32 descriptorBindingVariableDescriptorCount_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) & VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures && setRuntimeDescriptorArray( Bool32 runtimeDescriptorArray_ ) && VULKAN_HPP_NOEXCEPT { runtimeDescriptorArray = runtimeDescriptorArray_; return std::move( *this ); }
#endif /*VULKAN_HPP_NO_SETTERS*/

  // Implicit conversions to the C struct; rely on identical layout.
  // NOTE(review): reinterpret_cast target types stripped by extraction — restore before compiling.
  operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorIndexingFeatures const *() const VULKAN_HPP_NOEXCEPT
  {
    return reinterpret_cast( this );
  }

  operator VkPhysicalDeviceDescriptorIndexingFeatures *() VULKAN_HPP_NOEXCEPT
  {
    return reinterpret_cast( this );
  }

#if defined( VULKAN_HPP_USE_REFLECT )
  // All members as a tuple of references, in declaration order (std::tuple arguments stripped by extraction).
  std::tuple reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie( sType,
                     pNext,
                     shaderInputAttachmentArrayDynamicIndexing,
                     shaderUniformTexelBufferArrayDynamicIndexing,
                     shaderStorageTexelBufferArrayDynamicIndexing,
                     shaderUniformBufferArrayNonUniformIndexing,
                     shaderSampledImageArrayNonUniformIndexing,
                     shaderStorageBufferArrayNonUniformIndexing,
                     shaderStorageImageArrayNonUniformIndexing,
                     shaderInputAttachmentArrayNonUniformIndexing,
                     shaderUniformTexelBufferArrayNonUniformIndexing,
                     shaderStorageTexelBufferArrayNonUniformIndexing,
                     descriptorBindingUniformBufferUpdateAfterBind,
                     descriptorBindingSampledImageUpdateAfterBind,
                     descriptorBindingStorageImageUpdateAfterBind,
                     descriptorBindingStorageBufferUpdateAfterBind,
                     descriptorBindingUniformTexelBufferUpdateAfterBind,
                     descriptorBindingStorageTexelBufferUpdateAfterBind,
                     descriptorBindingUpdateUnusedWhilePending,
                     descriptorBindingPartiallyBound,
                     descriptorBindingVariableDescriptorCount,
                     runtimeDescriptorArray );
  }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
  auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const & ) const = default;
#else
  // Memberwise equality over every field, including sType and pNext.
  bool operator==( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
#  if defined( VULKAN_HPP_USE_REFLECT )
    return this->reflect() == rhs.reflect();
#  else
    return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) &&
           ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing ) &&
           ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing ) &&
           ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing ) &&
           ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing ) &&
           ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing ) &&
           ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing ) &&
           ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing ) &&
           ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing ) &&
           ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing ) &&
           ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing ) &&
           ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind ) &&
           ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind ) &&
           ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind ) &&
           ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind ) &&
           ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind ) &&
           ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind ) &&
           ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending ) &&
           ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound ) &&
           ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount ) &&
           ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
#  endif
  }

  bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif

public:
  // Layout mirrors VkPhysicalDeviceDescriptorIndexingFeatures exactly.
  StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
  void * pNext = {};
  Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
  Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
  Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
  Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
  Bool32 shaderSampledImageArrayNonUniformIndexing = {};
  Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
  Bool32 shaderStorageImageArrayNonUniformIndexing = {};
  Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
  Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
  Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
  Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
  Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
  Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
  Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
  Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
  Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
  Bool32 descriptorBindingUpdateUnusedWhilePending = {};
  Bool32 descriptorBindingPartiallyBound = {};
  Bool32 descriptorBindingVariableDescriptorCount = {};
  Bool32 runtimeDescriptorArray = {};
};

// StructureType -> wrapper-type mapping (template arguments stripped by extraction, see earlier note).
#if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorIndexingFeatures;
};
#endif

template <>
struct CppType
{
  using Type = PhysicalDeviceDescriptorIndexingFeatures;
};

// Backward-compatible alias for the pre-promotion VK_EXT_descriptor_indexing name.
using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;

// wrapper struct for struct VkPhysicalDeviceDescriptorIndexingProperties, see
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorIndexingProperties.html
// Read-only limits companion to the descriptor-indexing feature struct (continues below).
struct PhysicalDeviceDescriptorIndexingProperties
{
  using NativeType = VkPhysicalDeviceDescriptorIndexingProperties;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR
PhysicalDeviceDescriptorIndexingProperties( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, Bool32 robustBufferAccessUpdateAfterBind_ = {}, Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } , shaderStorageImageArrayNonUniformIndexingNative{ 
shaderStorageImageArrayNonUniformIndexingNative_ } , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; 
PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDescriptorIndexingProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorIndexingProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorIndexingProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxUpdateAfterBindDescriptorsInAllPools, shaderUniformBufferArrayNonUniformIndexingNative, shaderSampledImageArrayNonUniformIndexingNative, shaderStorageBufferArrayNonUniformIndexingNative, shaderStorageImageArrayNonUniformIndexingNative, shaderInputAttachmentArrayNonUniformIndexingNative, robustBufferAccessUpdateAfterBind, quadDivergentImplicitLod, maxPerStageDescriptorUpdateAfterBindSamplers, maxPerStageDescriptorUpdateAfterBindUniformBuffers, maxPerStageDescriptorUpdateAfterBindStorageBuffers, maxPerStageDescriptorUpdateAfterBindSampledImages, maxPerStageDescriptorUpdateAfterBindStorageImages, maxPerStageDescriptorUpdateAfterBindInputAttachments, maxPerStageUpdateAfterBindResources, maxDescriptorSetUpdateAfterBindSamplers, maxDescriptorSetUpdateAfterBindUniformBuffers, 
maxDescriptorSetUpdateAfterBindUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindStorageBuffers, maxDescriptorSetUpdateAfterBindStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindSampledImages, maxDescriptorSetUpdateAfterBindStorageImages, maxDescriptorSetUpdateAfterBindInputAttachments ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools ) && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative ) && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative ) && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative ) && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative ) && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative ) && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind ) && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod ) && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers ) && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers ) && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers ) && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages ) && ( maxPerStageDescriptorUpdateAfterBindStorageImages 
== rhs.maxPerStageDescriptorUpdateAfterBindStorageImages ) && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments ) && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources ) && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers ) && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers ) && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic ) && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers ) && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic ) && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages ) && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages ) && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments ); # endif } bool operator!=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties; void * pNext = {}; uint32_t maxUpdateAfterBindDescriptorsInAllPools = {}; Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {}; Bool32 shaderSampledImageArrayNonUniformIndexingNative = {}; Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {}; Bool32 shaderStorageImageArrayNonUniformIndexingNative = {}; Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {}; Bool32 robustBufferAccessUpdateAfterBind = {}; Bool32 quadDivergentImplicitLod = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {}; uint32_t 
maxPerStageDescriptorUpdateAfterBindStorageBuffers = {}; uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {}; uint32_t maxPerStageUpdateAfterBindResources = {}; uint32_t maxDescriptorSetUpdateAfterBindSamplers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {}; uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {}; uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorIndexingProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorIndexingProperties; }; using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties; // wrapper struct for struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV.html struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV { using NativeType = VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( Bool32 descriptorPoolOverallocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorPoolOverallocation{ descriptorPoolOverallocation_ } { } 
VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & operator=( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setDescriptorPoolOverallocation( Bool32 descriptorPoolOverallocation_ ) & VULKAN_HPP_NOEXCEPT { descriptorPoolOverallocation = descriptorPoolOverallocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV && setDescriptorPoolOverallocation( Bool32 descriptorPoolOverallocation_ ) && VULKAN_HPP_NOEXCEPT { descriptorPoolOverallocation = descriptorPoolOverallocation_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorPoolOverallocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorPoolOverallocation == rhs.descriptorPoolOverallocation ); # endif } bool operator!=( PhysicalDeviceDescriptorPoolOverallocationFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV; void * pNext = {}; Bool32 descriptorPoolOverallocation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE.html struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { using NativeType = VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( Bool32 descriptorSetHostMapping_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorSetHostMapping{ descriptorSetHostMapping_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & operator=( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE & setDescriptorSetHostMapping( Bool32 descriptorSetHostMapping_ ) & VULKAN_HPP_NOEXCEPT { descriptorSetHostMapping = descriptorSetHostMapping_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE && setDescriptorSetHostMapping( Bool32 descriptorSetHostMapping_ ) 
&& VULKAN_HPP_NOEXCEPT { descriptorSetHostMapping = descriptorSetHostMapping_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, descriptorSetHostMapping ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & ) const = default; #else bool operator==( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( descriptorSetHostMapping == rhs.descriptorSetHostMapping ); # endif } bool operator!=( PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; void * pNext = {}; Bool32 descriptorSetHostMapping = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; }; #endif template <> struct CppType { using Type = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; }; // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV.html struct PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( Bool32 deviceGeneratedCompute_ = {}, Bool32 deviceGeneratedComputePipelines_ = {}, Bool32 deviceGeneratedComputeCaptureReplay_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceGeneratedCompute{ deviceGeneratedCompute_ } , deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ } , deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedCompute( Bool32 deviceGeneratedCompute_ ) & VULKAN_HPP_NOEXCEPT { deviceGeneratedCompute = deviceGeneratedCompute_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV && setDeviceGeneratedCompute( Bool32 deviceGeneratedCompute_ ) && VULKAN_HPP_NOEXCEPT { deviceGeneratedCompute = deviceGeneratedCompute_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedComputePipelines( Bool32 deviceGeneratedComputePipelines_ ) & VULKAN_HPP_NOEXCEPT { deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV && setDeviceGeneratedComputePipelines( Bool32 deviceGeneratedComputePipelines_ ) && VULKAN_HPP_NOEXCEPT { deviceGeneratedComputePipelines = deviceGeneratedComputePipelines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setDeviceGeneratedComputeCaptureReplay( Bool32 deviceGeneratedComputeCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV && setDeviceGeneratedComputeCaptureReplay( Bool32 deviceGeneratedComputeCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { deviceGeneratedComputeCaptureReplay = deviceGeneratedComputeCaptureReplay_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceGeneratedCompute, deviceGeneratedComputePipelines, deviceGeneratedComputeCaptureReplay ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCompute == rhs.deviceGeneratedCompute ) && ( deviceGeneratedComputePipelines == rhs.deviceGeneratedComputePipelines ) && ( deviceGeneratedComputeCaptureReplay == rhs.deviceGeneratedComputeCaptureReplay ); # endif } bool operator!=( PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; void * pNext = {}; Bool32 deviceGeneratedCompute = {}; Bool32 deviceGeneratedComputePipelines = {}; Bool32 deviceGeneratedComputeCaptureReplay = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT.html struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( Bool32 deviceGeneratedCommands_ = {}, Bool32 dynamicGeneratedPipelineLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceGeneratedCommands{ deviceGeneratedCommands_ } , dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) & VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT && setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) && VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setDynamicGeneratedPipelineLayout( Bool32 dynamicGeneratedPipelineLayout_ ) & VULKAN_HPP_NOEXCEPT { dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT && setDynamicGeneratedPipelineLayout( Bool32 dynamicGeneratedPipelineLayout_ ) && VULKAN_HPP_NOEXCEPT { dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) && ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout ); # endif } bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; void * pNext = {}; Bool32 deviceGeneratedCommands = {}; Bool32 dynamicGeneratedPipelineLayout = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV.html struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( Bool32 deviceGeneratedCommands_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceGeneratedCommands{ deviceGeneratedCommands_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) & VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV && setDeviceGeneratedCommands( Bool32 deviceGeneratedCommands_ ) && VULKAN_HPP_NOEXCEPT { deviceGeneratedCommands = deviceGeneratedCommands_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceGeneratedCommands ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ); # endif } bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV; void * pNext = {}; Bool32 deviceGeneratedCommands = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT.html struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( uint32_t maxIndirectPipelineCount_ = {}, uint32_t 
maxIndirectShaderObjectCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsIndirectStride_ = {}, IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_ = {}, ShaderStageFlags supportedIndirectCommandsShaderStages_ = {}, ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_ = {}, ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_ = {}, Bool32 deviceGeneratedCommandsTransformFeedback_ = {}, Bool32 deviceGeneratedCommandsMultiDrawIndirectCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxIndirectPipelineCount{ maxIndirectPipelineCount_ } , maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ } , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } , maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ } , supportedIndirectCommandsInputModes{ supportedIndirectCommandsInputModes_ } , supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ } , supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ } , supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ } , deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ } , deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxIndirectPipelineCount, maxIndirectShaderObjectCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsIndirectStride, supportedIndirectCommandsInputModes, supportedIndirectCommandsShaderStages, supportedIndirectCommandsShaderStagesPipelineBinding, supportedIndirectCommandsShaderStagesShaderBinding, deviceGeneratedCommandsTransformFeedback, deviceGeneratedCommandsMultiDrawIndirectCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
maxIndirectPipelineCount == rhs.maxIndirectPipelineCount ) && ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride ) && ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes ) && ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages ) && ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding ) && ( supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding ) && ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback ) && ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount ); # endif } bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; void * pNext = {}; uint32_t maxIndirectPipelineCount = {}; uint32_t maxIndirectShaderObjectCount = {}; uint32_t maxIndirectSequenceCount = {}; uint32_t maxIndirectCommandsTokenCount = {}; uint32_t maxIndirectCommandsTokenOffset = {}; uint32_t maxIndirectCommandsIndirectStride = {}; IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes = {}; ShaderStageFlags supportedIndirectCommandsShaderStages = {}; ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding = {}; ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding = {}; Bool32 deviceGeneratedCommandsTransformFeedback = {}; Bool32 deviceGeneratedCommandsMultiDrawIndirectCount = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV.html struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ } , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } , maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ } , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } , maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ } , minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ } , minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ } , 
minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxGraphicsShaderGroupCount, maxIndirectSequenceCount, maxIndirectCommandsTokenCount, maxIndirectCommandsStreamCount, maxIndirectCommandsTokenOffset, maxIndirectCommandsStreamStride, minSequencesCountBufferOffsetAlignment, minSequencesIndexBufferOffsetAlignment, minIndirectCommandsBufferOffsetAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; #else bool operator==( 
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount ) && ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride ) && ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment ) && ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment ) && ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment ); # endif } bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; void * pNext = {}; uint32_t maxGraphicsShaderGroupCount = {}; uint32_t maxIndirectSequenceCount = {}; uint32_t maxIndirectCommandsTokenCount = {}; uint32_t maxIndirectCommandsStreamCount = {}; uint32_t maxIndirectCommandsTokenOffset = {}; uint32_t maxIndirectCommandsStreamStride = {}; uint32_t minSequencesCountBufferOffsetAlignment = {}; uint32_t minSequencesIndexBufferOffsetAlignment = {}; uint32_t minIndirectCommandsBufferOffsetAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDeviceMemoryReportFeaturesEXT.html struct PhysicalDeviceDeviceMemoryReportFeaturesEXT { using NativeType = VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( Bool32 deviceMemoryReport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceMemoryReport{ deviceMemoryReport_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( Bool32 deviceMemoryReport_ ) & VULKAN_HPP_NOEXCEPT { deviceMemoryReport 
= deviceMemoryReport_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT && setDeviceMemoryReport( Bool32 deviceMemoryReport_ ) && VULKAN_HPP_NOEXCEPT { deviceMemoryReport = deviceMemoryReport_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceMemoryReport ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); # endif } bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; void * pNext = {}; Bool32 deviceMemoryReport = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiagnosticsConfigFeaturesNV.html struct PhysicalDeviceDiagnosticsConfigFeaturesNV { using NativeType = VkPhysicalDeviceDiagnosticsConfigFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( Bool32 diagnosticsConfig_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , diagnosticsConfig{ diagnosticsConfig_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( Bool32 diagnosticsConfig_ ) & VULKAN_HPP_NOEXCEPT { diagnosticsConfig = diagnosticsConfig_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV && setDiagnosticsConfig( Bool32 diagnosticsConfig_ ) && VULKAN_HPP_NOEXCEPT { diagnosticsConfig = diagnosticsConfig_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, diagnosticsConfig ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); # endif } bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; void * pNext = {}; Bool32 diagnosticsConfig = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceDiscardRectanglePropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html struct PhysicalDeviceDiscardRectanglePropertiesEXT { using NativeType = VkPhysicalDeviceDiscardRectanglePropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxDiscardRectangles{ maxDiscardRectangles_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDiscardRectanglePropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const 
VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxDiscardRectangles ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); # endif } bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; void * pNext = {}; uint32_t maxDiscardRectangles = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapFeaturesNV.html struct PhysicalDeviceDisplacementMicromapFeaturesNV { using NativeType = VkPhysicalDeviceDisplacementMicromapFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( Bool32 displacementMicromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , displacementMicromap{ displacementMicromap_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( 
PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDisplacementMicromapFeaturesNV( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDisplacementMicromapFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDisplacementMicromapFeaturesNV & operator=( VkPhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setDisplacementMicromap( Bool32 displacementMicromap_ ) & VULKAN_HPP_NOEXCEPT { displacementMicromap = displacementMicromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV && setDisplacementMicromap( Bool32 displacementMicromap_ ) && VULKAN_HPP_NOEXCEPT { displacementMicromap = displacementMicromap_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapFeaturesNV *() 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, displacementMicromap ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDisplacementMicromapFeaturesNV const & ) const = default; # else bool operator==( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); # endif } bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapFeaturesNV; void * pNext = {}; Bool32 displacementMicromap = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDisplacementMicromapFeaturesNV; }; # endif template <> struct CppType { using Type = PhysicalDeviceDisplacementMicromapFeaturesNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDeviceDisplacementMicromapPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDisplacementMicromapPropertiesNV.html struct PhysicalDeviceDisplacementMicromapPropertiesNV { using NativeType = VkPhysicalDeviceDisplacementMicromapPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t maxDisplacementMicromapSubdivisionLevel_ = {}, void * pNext_ = nullptr 
) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxDisplacementMicromapSubdivisionLevel{ maxDisplacementMicromapSubdivisionLevel_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDisplacementMicromapPropertiesNV( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDisplacementMicromapPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDisplacementMicromapPropertiesNV & operator=( VkPhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDisplacementMicromapPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxDisplacementMicromapSubdivisionLevel ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDisplacementMicromapPropertiesNV const & ) const = default; # else bool operator==( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
maxDisplacementMicromapSubdivisionLevel == rhs.maxDisplacementMicromapSubdivisionLevel ); # endif } bool operator!=( PhysicalDeviceDisplacementMicromapPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceDisplacementMicromapPropertiesNV; void * pNext = {}; uint32_t maxDisplacementMicromapSubdivisionLevel = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDisplacementMicromapPropertiesNV; }; # endif template <> struct CppType { using Type = PhysicalDeviceDisplacementMicromapPropertiesNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkPhysicalDeviceDriverProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDriverProperties.html struct PhysicalDeviceDriverProperties { using NativeType = VkPhysicalDeviceDriverProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( DriverId driverID_ = DriverId::eAmdProprietary, std::array const & driverName_ = {}, std::array const & driverInfo_ = {}, ConformanceVersion conformanceVersion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , driverID{ driverID_ } , driverName{ driverName_ } , driverInfo{ driverInfo_ } , conformanceVersion{ conformanceVersion_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDriverProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDriverProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDriverProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDriverProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &, ConformanceVersion const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, driverID, driverName, driverInfo, conformanceVersion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = driverID <=> rhs.driverID; cmp != 0 ) return cmp; if ( auto cmp = strcmp( driverName, rhs.driverName ); cmp != 0 ) return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = strcmp( driverInfo, rhs.driverInfo ); cmp != 0 ) return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; if ( auto cmp = conformanceVersion <=> rhs.conformanceVersion; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( driverID == rhs.driverID ) && ( strcmp( driverName, rhs.driverName ) == 0 ) && ( strcmp( driverInfo, rhs.driverInfo ) == 0 ) && ( conformanceVersion == rhs.conformanceVersion ); } bool operator!=( PhysicalDeviceDriverProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::ePhysicalDeviceDriverProperties; void * pNext = {}; DriverId driverID = DriverId::eAmdProprietary; ArrayWrapper1D driverName = {}; ArrayWrapper1D driverInfo = {}; ConformanceVersion conformanceVersion = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDriverProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceDriverProperties; }; using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties; // wrapper struct for struct VkPhysicalDeviceDrmPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDrmPropertiesEXT.html struct PhysicalDeviceDrmPropertiesEXT { using NativeType = VkPhysicalDeviceDrmPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDrmPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( Bool32 hasPrimary_ = {}, Bool32 hasRender_ = {}, int64_t primaryMajor_ = {}, int64_t primaryMinor_ = {}, int64_t renderMajor_ = {}, int64_t renderMinor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hasPrimary{ hasPrimary_ } , hasRender{ 
hasRender_ } , primaryMajor{ primaryMajor_ } , primaryMinor{ primaryMinor_ } , renderMajor{ renderMajor_ } , renderMinor{ renderMinor_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDrmPropertiesEXT( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDrmPropertiesEXT( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDrmPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDrmPropertiesEXT & operator=( PhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDrmPropertiesEXT & operator=( VkPhysicalDeviceDrmPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceDrmPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDrmPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDrmPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDrmPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, hasPrimary, hasRender, primaryMajor, primaryMinor, renderMajor, renderMinor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDrmPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hasPrimary == rhs.hasPrimary ) && ( hasRender == rhs.hasRender ) && ( primaryMajor == rhs.primaryMajor ) && ( primaryMinor == rhs.primaryMinor ) && ( renderMajor == rhs.renderMajor ) && ( renderMinor == rhs.renderMinor ); # endif 
} bool operator!=( PhysicalDeviceDrmPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDrmPropertiesEXT; void * pNext = {}; Bool32 hasPrimary = {}; Bool32 hasRender = {}; int64_t primaryMajor = {}; int64_t primaryMinor = {}; int64_t renderMajor = {}; int64_t renderMinor = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDrmPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDrmPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceDynamicRenderingFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingFeatures.html struct PhysicalDeviceDynamicRenderingFeatures { using NativeType = VkPhysicalDeviceDynamicRenderingFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( Bool32 dynamicRendering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicRendering{ dynamicRendering_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDynamicRenderingFeatures( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDynamicRenderingFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDynamicRenderingFeatures & operator=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDynamicRenderingFeatures & operator=( VkPhysicalDeviceDynamicRenderingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); 
return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures & setDynamicRendering( Bool32 dynamicRendering_ ) & VULKAN_HPP_NOEXCEPT { dynamicRendering = dynamicRendering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingFeatures && setDynamicRendering( Bool32 dynamicRendering_ ) && VULKAN_HPP_NOEXCEPT { dynamicRendering = dynamicRendering_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDynamicRenderingFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dynamicRendering ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; #else bool operator==( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRendering == rhs.dynamicRendering ); # endif } bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return 
!operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingFeatures; void * pNext = {}; Bool32 dynamicRendering = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingFeatures; }; using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; // wrapper struct for struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingLocalReadFeatures.html struct PhysicalDeviceDynamicRenderingLocalReadFeatures { using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( Bool32 dynamicRenderingLocalRead_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setDynamicRenderingLocalRead( Bool32 dynamicRenderingLocalRead_ ) & VULKAN_HPP_NOEXCEPT { dynamicRenderingLocalRead = dynamicRenderingLocalRead_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures && setDynamicRenderingLocalRead( Bool32 dynamicRenderingLocalRead_ ) && VULKAN_HPP_NOEXCEPT { dynamicRenderingLocalRead = dynamicRenderingLocalRead_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dynamicRenderingLocalRead ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default; #else bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); 
# else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ); # endif } bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; void * pNext = {}; Bool32 dynamicRenderingLocalRead = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures; }; using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; // wrapper struct for struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT.html struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( Bool32 dynamicRenderingUnusedAttachments_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & operator=( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & setDynamicRenderingUnusedAttachments( Bool32 dynamicRenderingUnusedAttachments_ ) & VULKAN_HPP_NOEXCEPT { dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT && setDynamicRenderingUnusedAttachments( Bool32 dynamicRenderingUnusedAttachments_ ) && VULKAN_HPP_NOEXCEPT { dynamicRenderingUnusedAttachments = dynamicRenderingUnusedAttachments_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } operator VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dynamicRenderingUnusedAttachments ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicRenderingUnusedAttachments == rhs.dynamicRenderingUnusedAttachments ); # endif } bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; void * pNext = {}; Bool32 dynamicRenderingUnusedAttachments = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceExclusiveScissorFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html struct PhysicalDeviceExclusiveScissorFeaturesNV { using NativeType = VkPhysicalDeviceExclusiveScissorFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceExclusiveScissorFeaturesNV( Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , exclusiveScissor{ exclusiveScissor_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( Bool32 exclusiveScissor_ ) & VULKAN_HPP_NOEXCEPT { exclusiveScissor = exclusiveScissor_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV && setExclusiveScissor( Bool32 exclusiveScissor_ ) && VULKAN_HPP_NOEXCEPT { exclusiveScissor = exclusiveScissor_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceExclusiveScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExclusiveScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, exclusiveScissor ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( exclusiveScissor == rhs.exclusiveScissor ); # endif } bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; void * pNext = {}; Bool32 exclusiveScissor = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExclusiveScissorFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceExclusiveScissorFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState2FeaturesEXT.html struct PhysicalDeviceExtendedDynamicState2FeaturesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( Bool32 extendedDynamicState2_ = 
{}, Bool32 extendedDynamicState2LogicOp_ = {}, Bool32 extendedDynamicState2PatchControlPoints_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , extendedDynamicState2{ extendedDynamicState2_ } , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState2FeaturesEXT( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2( Bool32 extendedDynamicState2_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState2 = extendedDynamicState2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT && setExtendedDynamicState2( Bool32 extendedDynamicState2_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState2 = extendedDynamicState2_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2LogicOp( Bool32 extendedDynamicState2LogicOp_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT && setExtendedDynamicState2LogicOp( Bool32 extendedDynamicState2LogicOp_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState2LogicOp = extendedDynamicState2LogicOp_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setExtendedDynamicState2PatchControlPoints( Bool32 extendedDynamicState2PatchControlPoints_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT && setExtendedDynamicState2PatchControlPoints( Bool32 extendedDynamicState2PatchControlPoints_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState2PatchControlPoints = extendedDynamicState2PatchControlPoints_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = 
// NOTE(review): this region is machine-generated Vulkan-Hpp code that has been mangled by an
// extraction pass: original newlines are collapsed (so `//` comments and `#if`/`#endif`
// directives sit mid-line) and every angle-bracketed span was dropped — e.g.
// `reinterpret_cast( &rhs )`, `std::tuple reflect()` and `template <> struct CppType { ... }`
// have lost their template arguments. Tokens below are kept byte-identical; to restore the
// dropped `<...>` content, regenerate from the Vulkan XML registry (do not hand-edit).
//
// The line below finishes PhysicalDeviceExtendedDynamicState2FeaturesEXT (operator==/!=
// fallback used when the spaceship operator is unavailable, with a VULKAN_HPP_USE_REFLECT
// tuple-compare branch, then the public sType/pNext/feature members and CppType trait
// specializations), and then opens PhysicalDeviceExtendedDynamicState3FeaturesEXT: NativeType
// alias, allowDuplicate/structureType statics, and the start of the value constructor taking
// one Bool32 per VK_EXT_extended_dynamic_state3 feature flag.
default; #else bool operator==( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState2 == rhs.extendedDynamicState2 ) && ( extendedDynamicState2LogicOp == rhs.extendedDynamicState2LogicOp ) && ( extendedDynamicState2PatchControlPoints == rhs.extendedDynamicState2PatchControlPoints ); # endif } bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState2FeaturesEXT; void * pNext = {}; Bool32 extendedDynamicState2 = {}; Bool32 extendedDynamicState2LogicOp = {}; Bool32 extendedDynamicState2PatchControlPoints = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState2FeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3FeaturesEXT.html struct PhysicalDeviceExtendedDynamicState3FeaturesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( Bool32 extendedDynamicState3TessellationDomainOrigin_ = {}, Bool32 extendedDynamicState3DepthClampEnable_ = {}, Bool32 extendedDynamicState3PolygonMode_ = {}, Bool32 extendedDynamicState3RasterizationSamples_ = {}, Bool32 
// Remaining defaulted Bool32 constructor parameters (one per dynamic-state-3 feature), with
// pNext_ last, then the member-initializer list mirroring the parameter order.
extendedDynamicState3SampleMask_ = {}, Bool32 extendedDynamicState3AlphaToCoverageEnable_ = {}, Bool32 extendedDynamicState3AlphaToOneEnable_ = {}, Bool32 extendedDynamicState3LogicOpEnable_ = {}, Bool32 extendedDynamicState3ColorBlendEnable_ = {}, Bool32 extendedDynamicState3ColorBlendEquation_ = {}, Bool32 extendedDynamicState3ColorWriteMask_ = {}, Bool32 extendedDynamicState3RasterizationStream_ = {}, Bool32 extendedDynamicState3ConservativeRasterizationMode_ = {}, Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ = {}, Bool32 extendedDynamicState3DepthClipEnable_ = {}, Bool32 extendedDynamicState3SampleLocationsEnable_ = {}, Bool32 extendedDynamicState3ColorBlendAdvanced_ = {}, Bool32 extendedDynamicState3ProvokingVertexMode_ = {}, Bool32 extendedDynamicState3LineRasterizationMode_ = {}, Bool32 extendedDynamicState3LineStippleEnable_ = {}, Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ = {}, Bool32 extendedDynamicState3ViewportWScalingEnable_ = {}, Bool32 extendedDynamicState3ViewportSwizzle_ = {}, Bool32 extendedDynamicState3CoverageToColorEnable_ = {}, Bool32 extendedDynamicState3CoverageToColorLocation_ = {}, Bool32 extendedDynamicState3CoverageModulationMode_ = {}, Bool32 extendedDynamicState3CoverageModulationTableEnable_ = {}, Bool32 extendedDynamicState3CoverageModulationTable_ = {}, Bool32 extendedDynamicState3CoverageReductionMode_ = {}, Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {}, Bool32 extendedDynamicState3ShadingRateImageEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ } , extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ } , extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ } , extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ } , extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ } , 
extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ } , extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ } , extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ } , extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ } , extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ } , extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ } , extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ } , extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ } , extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ } , extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ } , extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ } , extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ } , extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ } , extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ } , extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ } , extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ } , extendedDynamicState3ViewportWScalingEnable{ extendedDynamicState3ViewportWScalingEnable_ } , extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ } , extendedDynamicState3CoverageToColorEnable{ extendedDynamicState3CoverageToColorEnable_ } , extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ } , extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ } , extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ } , 
// End of member-initializer list, then the defaulted copy operations, the converting
// constructor/assignment from the native VkPhysicalDeviceExtendedDynamicState3FeaturesEXT
// (the reinterpret_cast target type was stripped by extraction), and the start of the
// lvalue/rvalue setter pairs (each `&` overload returns *this, each `&&` overload returns
// std::move( *this ) to support chaining on temporaries).
extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ } , extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ } , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3FeaturesEXT( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3TessellationDomainOrigin( Bool32 extendedDynamicState3TessellationDomainOrigin_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && 
setExtendedDynamicState3TessellationDomainOrigin( Bool32 extendedDynamicState3TessellationDomainOrigin_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClampEnable( Bool32 extendedDynamicState3DepthClampEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3DepthClampEnable( Bool32 extendedDynamicState3DepthClampEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClampEnable = extendedDynamicState3DepthClampEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3PolygonMode( Bool32 extendedDynamicState3PolygonMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3PolygonMode( Bool32 extendedDynamicState3PolygonMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationSamples( Bool32 extendedDynamicState3RasterizationSamples_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3RasterizationSamples( Bool32 extendedDynamicState3RasterizationSamples_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleMask( Bool32 extendedDynamicState3SampleMask_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3SampleMask( Bool32 extendedDynamicState3SampleMask_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToCoverageEnable( Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3AlphaToCoverageEnable( Bool32 extendedDynamicState3AlphaToCoverageEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToOneEnable( Bool32 extendedDynamicState3AlphaToOneEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3AlphaToOneEnable( Bool32 extendedDynamicState3AlphaToOneEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LogicOpEnable( Bool32 extendedDynamicState3LogicOpEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; return 
*this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3LogicOpEnable( Bool32 extendedDynamicState3LogicOpEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEnable( Bool32 extendedDynamicState3ColorBlendEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ColorBlendEnable( Bool32 extendedDynamicState3ColorBlendEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEnable = extendedDynamicState3ColorBlendEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendEquation( Bool32 extendedDynamicState3ColorBlendEquation_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ColorBlendEquation( Bool32 extendedDynamicState3ColorBlendEquation_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorWriteMask( Bool32 extendedDynamicState3ColorWriteMask_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ColorWriteMask( Bool32 extendedDynamicState3ColorWriteMask_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorWriteMask = 
extendedDynamicState3ColorWriteMask_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationStream( Bool32 extendedDynamicState3RasterizationStream_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3RasterizationStream( Bool32 extendedDynamicState3RasterizationStream_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3RasterizationStream = extendedDynamicState3RasterizationStream_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ConservativeRasterizationMode( Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ConservativeRasterizationMode( Bool32 extendedDynamicState3ConservativeRasterizationMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ExtraPrimitiveOverestimationSize( Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ExtraPrimitiveOverestimationSize( Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ExtraPrimitiveOverestimationSize = 
extendedDynamicState3ExtraPrimitiveOverestimationSize_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipEnable( Bool32 extendedDynamicState3DepthClipEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3DepthClipEnable( Bool32 extendedDynamicState3DepthClipEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipEnable = extendedDynamicState3DepthClipEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleLocationsEnable( Bool32 extendedDynamicState3SampleLocationsEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3SampleLocationsEnable( Bool32 extendedDynamicState3SampleLocationsEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendAdvanced( Bool32 extendedDynamicState3ColorBlendAdvanced_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ColorBlendAdvanced( Bool32 extendedDynamicState3ColorBlendAdvanced_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ColorBlendAdvanced = extendedDynamicState3ColorBlendAdvanced_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ProvokingVertexMode( 
Bool32 extendedDynamicState3ProvokingVertexMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ProvokingVertexMode( Bool32 extendedDynamicState3ProvokingVertexMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ProvokingVertexMode = extendedDynamicState3ProvokingVertexMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineRasterizationMode( Bool32 extendedDynamicState3LineRasterizationMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3LineRasterizationMode( Bool32 extendedDynamicState3LineRasterizationMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineRasterizationMode = extendedDynamicState3LineRasterizationMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3LineStippleEnable( Bool32 extendedDynamicState3LineStippleEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3LineStippleEnable( Bool32 extendedDynamicState3LineStippleEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3LineStippleEnable = extendedDynamicState3LineStippleEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipNegativeOneToOne( Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3DepthClipNegativeOneToOne( Bool32 extendedDynamicState3DepthClipNegativeOneToOne_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3DepthClipNegativeOneToOne = extendedDynamicState3DepthClipNegativeOneToOne_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportWScalingEnable( Bool32 extendedDynamicState3ViewportWScalingEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ViewportWScalingEnable( Bool32 extendedDynamicState3ViewportWScalingEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ViewportSwizzle( Bool32 extendedDynamicState3ViewportSwizzle_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ViewportSwizzle( Bool32 extendedDynamicState3ViewportSwizzle_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ViewportSwizzle = extendedDynamicState3ViewportSwizzle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorEnable( Bool32 extendedDynamicState3CoverageToColorEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageToColorEnable( Bool32 
extendedDynamicState3CoverageToColorEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorEnable = extendedDynamicState3CoverageToColorEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageToColorLocation( Bool32 extendedDynamicState3CoverageToColorLocation_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageToColorLocation( Bool32 extendedDynamicState3CoverageToColorLocation_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageToColorLocation = extendedDynamicState3CoverageToColorLocation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationMode( Bool32 extendedDynamicState3CoverageModulationMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageModulationMode( Bool32 extendedDynamicState3CoverageModulationMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationMode = extendedDynamicState3CoverageModulationMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTableEnable( Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageModulationTableEnable( Bool32 extendedDynamicState3CoverageModulationTableEnable_ ) && 
VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageModulationTable( Bool32 extendedDynamicState3CoverageModulationTable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageModulationTable( Bool32 extendedDynamicState3CoverageModulationTable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageModulationTable = extendedDynamicState3CoverageModulationTable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3CoverageReductionMode( Bool32 extendedDynamicState3CoverageReductionMode_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3CoverageReductionMode( Bool32 extendedDynamicState3CoverageReductionMode_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RepresentativeFragmentTestEnable( Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3RepresentativeFragmentTestEnable( Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ ) && VULKAN_HPP_NOEXCEPT { 
// Last setter pair, then conversions to the native Vk type (the reinterpret_cast target
// types were stripped by extraction), the VULKAN_HPP_USE_REFLECT tuple accessor (its
// std::tuple template-argument list was stripped too), and the spaceship-or-operator==
// comparison section.
extendedDynamicState3RepresentativeFragmentTestEnable = extendedDynamicState3RepresentativeFragmentTestEnable_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ShadingRateImageEnable( Bool32 extendedDynamicState3ShadingRateImageEnable_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT && setExtendedDynamicState3ShadingRateImageEnable( Bool32 extendedDynamicState3ShadingRateImageEnable_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState3ShadingRateImageEnable = extendedDynamicState3ShadingRateImageEnable_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3FeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, extendedDynamicState3TessellationDomainOrigin, extendedDynamicState3DepthClampEnable, extendedDynamicState3PolygonMode, extendedDynamicState3RasterizationSamples, extendedDynamicState3SampleMask, extendedDynamicState3AlphaToCoverageEnable, extendedDynamicState3AlphaToOneEnable, extendedDynamicState3LogicOpEnable, extendedDynamicState3ColorBlendEnable, extendedDynamicState3ColorBlendEquation, extendedDynamicState3ColorWriteMask, extendedDynamicState3RasterizationStream, extendedDynamicState3ConservativeRasterizationMode, 
extendedDynamicState3ExtraPrimitiveOverestimationSize, extendedDynamicState3DepthClipEnable, extendedDynamicState3SampleLocationsEnable, extendedDynamicState3ColorBlendAdvanced, extendedDynamicState3ProvokingVertexMode, extendedDynamicState3LineRasterizationMode, extendedDynamicState3LineStippleEnable, extendedDynamicState3DepthClipNegativeOneToOne, extendedDynamicState3ViewportWScalingEnable, extendedDynamicState3ViewportSwizzle, extendedDynamicState3CoverageToColorEnable, extendedDynamicState3CoverageToColorLocation, extendedDynamicState3CoverageModulationMode, extendedDynamicState3CoverageModulationTableEnable, extendedDynamicState3CoverageModulationTable, extendedDynamicState3CoverageReductionMode, extendedDynamicState3RepresentativeFragmentTestEnable, extendedDynamicState3ShadingRateImageEnable ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState3TessellationDomainOrigin == rhs.extendedDynamicState3TessellationDomainOrigin ) && ( extendedDynamicState3DepthClampEnable == rhs.extendedDynamicState3DepthClampEnable ) && ( extendedDynamicState3PolygonMode == rhs.extendedDynamicState3PolygonMode ) && ( extendedDynamicState3RasterizationSamples == rhs.extendedDynamicState3RasterizationSamples ) && ( extendedDynamicState3SampleMask == rhs.extendedDynamicState3SampleMask ) && ( extendedDynamicState3AlphaToCoverageEnable == rhs.extendedDynamicState3AlphaToCoverageEnable ) && ( extendedDynamicState3AlphaToOneEnable == rhs.extendedDynamicState3AlphaToOneEnable ) && ( extendedDynamicState3LogicOpEnable == rhs.extendedDynamicState3LogicOpEnable ) && ( extendedDynamicState3ColorBlendEnable == 
rhs.extendedDynamicState3ColorBlendEnable ) && ( extendedDynamicState3ColorBlendEquation == rhs.extendedDynamicState3ColorBlendEquation ) && ( extendedDynamicState3ColorWriteMask == rhs.extendedDynamicState3ColorWriteMask ) && ( extendedDynamicState3RasterizationStream == rhs.extendedDynamicState3RasterizationStream ) && ( extendedDynamicState3ConservativeRasterizationMode == rhs.extendedDynamicState3ConservativeRasterizationMode ) && ( extendedDynamicState3ExtraPrimitiveOverestimationSize == rhs.extendedDynamicState3ExtraPrimitiveOverestimationSize ) && ( extendedDynamicState3DepthClipEnable == rhs.extendedDynamicState3DepthClipEnable ) && ( extendedDynamicState3SampleLocationsEnable == rhs.extendedDynamicState3SampleLocationsEnable ) && ( extendedDynamicState3ColorBlendAdvanced == rhs.extendedDynamicState3ColorBlendAdvanced ) && ( extendedDynamicState3ProvokingVertexMode == rhs.extendedDynamicState3ProvokingVertexMode ) && ( extendedDynamicState3LineRasterizationMode == rhs.extendedDynamicState3LineRasterizationMode ) && ( extendedDynamicState3LineStippleEnable == rhs.extendedDynamicState3LineStippleEnable ) && ( extendedDynamicState3DepthClipNegativeOneToOne == rhs.extendedDynamicState3DepthClipNegativeOneToOne ) && ( extendedDynamicState3ViewportWScalingEnable == rhs.extendedDynamicState3ViewportWScalingEnable ) && ( extendedDynamicState3ViewportSwizzle == rhs.extendedDynamicState3ViewportSwizzle ) && ( extendedDynamicState3CoverageToColorEnable == rhs.extendedDynamicState3CoverageToColorEnable ) && ( extendedDynamicState3CoverageToColorLocation == rhs.extendedDynamicState3CoverageToColorLocation ) && ( extendedDynamicState3CoverageModulationMode == rhs.extendedDynamicState3CoverageModulationMode ) && ( extendedDynamicState3CoverageModulationTableEnable == rhs.extendedDynamicState3CoverageModulationTableEnable ) && ( extendedDynamicState3CoverageModulationTable == rhs.extendedDynamicState3CoverageModulationTable ) && ( extendedDynamicState3CoverageReductionMode 
// End of the memberwise operator== fallback, operator!=, and the public data members
// (sType/pNext followed by one Bool32 per feature, all brace-initialized to zero).
== rhs.extendedDynamicState3CoverageReductionMode ) && ( extendedDynamicState3RepresentativeFragmentTestEnable == rhs.extendedDynamicState3RepresentativeFragmentTestEnable ) && ( extendedDynamicState3ShadingRateImageEnable == rhs.extendedDynamicState3ShadingRateImageEnable ); # endif } bool operator!=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3FeaturesEXT; void * pNext = {}; Bool32 extendedDynamicState3TessellationDomainOrigin = {}; Bool32 extendedDynamicState3DepthClampEnable = {}; Bool32 extendedDynamicState3PolygonMode = {}; Bool32 extendedDynamicState3RasterizationSamples = {}; Bool32 extendedDynamicState3SampleMask = {}; Bool32 extendedDynamicState3AlphaToCoverageEnable = {}; Bool32 extendedDynamicState3AlphaToOneEnable = {}; Bool32 extendedDynamicState3LogicOpEnable = {}; Bool32 extendedDynamicState3ColorBlendEnable = {}; Bool32 extendedDynamicState3ColorBlendEquation = {}; Bool32 extendedDynamicState3ColorWriteMask = {}; Bool32 extendedDynamicState3RasterizationStream = {}; Bool32 extendedDynamicState3ConservativeRasterizationMode = {}; Bool32 extendedDynamicState3ExtraPrimitiveOverestimationSize = {}; Bool32 extendedDynamicState3DepthClipEnable = {}; Bool32 extendedDynamicState3SampleLocationsEnable = {}; Bool32 extendedDynamicState3ColorBlendAdvanced = {}; Bool32 extendedDynamicState3ProvokingVertexMode = {}; Bool32 extendedDynamicState3LineRasterizationMode = {}; Bool32 extendedDynamicState3LineStippleEnable = {}; Bool32 extendedDynamicState3DepthClipNegativeOneToOne = {}; Bool32 extendedDynamicState3ViewportWScalingEnable = {}; Bool32 extendedDynamicState3ViewportSwizzle = {}; Bool32 extendedDynamicState3CoverageToColorEnable = {}; Bool32 extendedDynamicState3CoverageToColorLocation = {}; Bool32 extendedDynamicState3CoverageModulationMode = {}; Bool32 
// NOTE(review): extraction-mangled generated code (see note at the start of this region):
// original newlines are collapsed and all angle-bracketed template arguments were dropped
// (`template <> struct CppType { ... }` below has lost its specialization parameters —
// presumably `CppType<StructureType, StructureType::e...>` per the Vulkan-Hpp pattern, but
// confirm against the regenerated header). Tokens are kept byte-identical.
//
// The line below finishes the PhysicalDeviceExtendedDynamicState3FeaturesEXT member list and
// its CppType trait specializations, then begins the
// PhysicalDeviceExtendedDynamicState3PropertiesEXT wrapper: NativeType alias, statics,
// constructors (value ctor taking dynamicPrimitiveTopologyUnrestricted_ and pNext_, defaulted
// copy, and a converting ctor from the native Vk struct).
extendedDynamicState3CoverageModulationTableEnable = {}; Bool32 extendedDynamicState3CoverageModulationTable = {}; Bool32 extendedDynamicState3CoverageReductionMode = {}; Bool32 extendedDynamicState3RepresentativeFragmentTestEnable = {}; Bool32 extendedDynamicState3ShadingRateImageEnable = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState3FeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.html struct PhysicalDeviceExtendedDynamicState3PropertiesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicState3PropertiesEXT( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState3PropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
// Assignment from the native Vk struct, conversions, reflect(), and comparisons for the
// properties wrapper. Unlike the feature structs, no setter section appears here (properties
// structs are filled in by the implementation, not the application). The trailing `//` text
// opens the next wrapper's comment, which continues past this chunk.
PhysicalDeviceExtendedDynamicState3PropertiesEXT & operator=( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicState3PropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, dynamicPrimitiveTopologyUnrestricted ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicPrimitiveTopologyUnrestricted == rhs.dynamicPrimitiveTopologyUnrestricted ); # endif } bool operator!=( PhysicalDeviceExtendedDynamicState3PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicState3PropertiesEXT; void * pNext = {}; Bool32 dynamicPrimitiveTopologyUnrestricted = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicState3PropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT, see 
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.html struct PhysicalDeviceExtendedDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , extendedDynamicState{ extendedDynamicState_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( Bool32 
extendedDynamicState_ ) & VULKAN_HPP_NOEXCEPT { extendedDynamicState = extendedDynamicState_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT && setExtendedDynamicState( Bool32 extendedDynamicState_ ) && VULKAN_HPP_NOEXCEPT { extendedDynamicState = extendedDynamicState_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, extendedDynamicState ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedDynamicState == rhs.extendedDynamicState ); # endif } bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT; void * pNext = {}; Bool32 extendedDynamicState = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT; }; // 
wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV.html struct PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV { using NativeType = VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( Bool32 extendedSparseAddressSpace_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , extendedSparseAddressSpace{ extendedSparseAddressSpace_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV && setPNext( void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV & setExtendedSparseAddressSpace( Bool32 extendedSparseAddressSpace_ ) & VULKAN_HPP_NOEXCEPT { extendedSparseAddressSpace = extendedSparseAddressSpace_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV && setExtendedSparseAddressSpace( Bool32 extendedSparseAddressSpace_ ) && VULKAN_HPP_NOEXCEPT { extendedSparseAddressSpace = extendedSparseAddressSpace_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, extendedSparseAddressSpace ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpace == rhs.extendedSparseAddressSpace ); # endif } bool operator!=( PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; void * pNext = {}; Bool32 extendedSparseAddressSpace = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV.html struct PhysicalDeviceExtendedSparseAddressSpacePropertiesNV { using NativeType = VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( DeviceSize extendedSparseAddressSpaceSize_ = {}, ImageUsageFlags extendedSparseImageUsageFlags_ = {}, BufferUsageFlags extendedSparseBufferUsageFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ } , extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ } , extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedSparseAddressSpacePropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( 
PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExtendedSparseAddressSpacePropertiesNV & operator=( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, extendedSparseAddressSpaceSize, extendedSparseImageUsageFlags, extendedSparseBufferUsageFlags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( extendedSparseAddressSpaceSize == rhs.extendedSparseAddressSpaceSize ) && ( extendedSparseImageUsageFlags == rhs.extendedSparseImageUsageFlags ) && ( extendedSparseBufferUsageFlags == rhs.extendedSparseBufferUsageFlags ); # endif } bool operator!=( PhysicalDeviceExtendedSparseAddressSpacePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; void * pNext 
= {}; DeviceSize extendedSparseAddressSpaceSize = {}; ImageUsageFlags extendedSparseImageUsageFlags = {}; BufferUsageFlags extendedSparseBufferUsageFlags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceExternalBufferInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalBufferInfo.html struct PhysicalDeviceExternalBufferInfo { using NativeType = VkPhysicalDeviceExternalBufferInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( BufferCreateFlags flags_ = {}, BufferUsageFlags usage_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , usage{ usage_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalBufferInfo( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( BufferCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo && setFlags( BufferCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setUsage( BufferUsageFlags usage_ ) & VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo && setUsage( BufferUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalBufferInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalBufferInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, usage, handleType ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalBufferInfo const & ) const = default; #else bool operator==( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType ); # endif } bool operator!=( PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; const void * pNext = {}; BufferCreateFlags flags = {}; BufferUsageFlags usage = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalBufferInfo; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalBufferInfo; }; using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; // wrapper struct for struct VkPhysicalDeviceExternalComputeQueuePropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalComputeQueuePropertiesNV.html struct PhysicalDeviceExternalComputeQueuePropertiesNV { using NativeType = VkPhysicalDeviceExternalComputeQueuePropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV( uint32_t externalDataSize_ = {}, uint32_t maxExternalQueues_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalDataSize{ externalDataSize_ } , maxExternalQueues{ maxExternalQueues_ } { } 
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalComputeQueuePropertiesNV( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalComputeQueuePropertiesNV( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalComputeQueuePropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalComputeQueuePropertiesNV & operator=( VkPhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalComputeQueuePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalComputeQueuePropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalComputeQueuePropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalDataSize, maxExternalQueues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalComputeQueuePropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceExternalComputeQueuePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalDataSize == rhs.externalDataSize ) && ( maxExternalQueues == rhs.maxExternalQueues ); # endif } bool operator!=( PhysicalDeviceExternalComputeQueuePropertiesNV 
const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalComputeQueuePropertiesNV; void * pNext = {}; uint32_t externalDataSize = {}; uint32_t maxExternalQueues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalComputeQueuePropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalComputeQueuePropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceExternalFenceInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFenceInfo.html struct PhysicalDeviceExternalFenceInfo { using NativeType = VkPhysicalDeviceExternalFenceInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( ExternalFenceHandleTypeFlagBits handleType_ = ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalFenceInfo( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceExternalFenceInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo && setHandleType( ExternalFenceHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalFenceInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalFenceInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalFenceInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalFenceInfo const & ) const = default; #else bool operator==( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); # endif } bool operator!=( PhysicalDeviceExternalFenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo; const void * pNext = {}; ExternalFenceHandleTypeFlagBits handleType = 
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID, see
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolveFeaturesANDROID.html
// NOTE(review): stripped template arguments restored per vulkan.hpp conventions — confirm against generated header.
struct PhysicalDeviceExternalFormatResolveFeaturesANDROID
{
  using NativeType = VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;

# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( Bool32 externalFormatResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
    : pNext{ pNext_ }
    , externalFormatResolve{ externalFormatResolve_ }
  {
  }

  VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

  // construction from the layout-compatible C struct
  PhysicalDeviceExternalFormatResolveFeaturesANDROID( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    : PhysicalDeviceExternalFormatResolveFeaturesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs ) )
  {
  }

  PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

  PhysicalDeviceExternalFormatResolveFeaturesANDROID & operator=( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<PhysicalDeviceExternalFormatResolveFeaturesANDROID const *>( &rhs );
    return *this;
  }

# if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID & setExternalFormatResolve( Bool32 externalFormatResolve_ ) & VULKAN_HPP_NOEXCEPT { externalFormatResolve = externalFormatResolve_; return *this; }

  VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFormatResolveFeaturesANDROID && setExternalFormatResolve( Bool32 externalFormatResolve_ ) && VULKAN_HPP_NOEXCEPT { externalFormatResolve = externalFormatResolve_; return std::move( *this ); }
# endif /*VULKAN_HPP_NO_SETTERS*/

  operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID *>( this ); }

# if defined( VULKAN_HPP_USE_REFLECT )
  std::tuple<StructureType const &, void * const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie( sType, pNext, externalFormatResolve );
  }
# endif

# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
  auto operator<=>( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & ) const = default;
# else
  bool operator==( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
#  if defined( VULKAN_HPP_USE_REFLECT )
    return this->reflect() == rhs.reflect();
#  else
    return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalFormatResolve == rhs.externalFormatResolve );
#  endif
  }

  bool operator!=( PhysicalDeviceExternalFormatResolveFeaturesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
# endif

public:
  StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID;
  void * pNext = {};
  Bool32 externalFormatResolve = {};
};

# if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType<VkPhysicalDeviceExternalFormatResolveFeaturesANDROID>
{
  using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
};
# endif

template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID>
{
  using Type = PhysicalDeviceExternalFormatResolveFeaturesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
// wrapper struct for struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID, see
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalFormatResolvePropertiesANDROID.html
// NOTE(review): stripped template arguments restored per vulkan.hpp conventions — confirm against generated header.
// Properties struct: read-only query output, so the generator emits no setters.
struct PhysicalDeviceExternalFormatResolvePropertiesANDROID
{
  using NativeType = VkPhysicalDeviceExternalFormatResolvePropertiesANDROID;

  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;

# if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( Bool32 nullColorAttachmentWithExternalFormatResolve_ = {},
                                                                             ChromaLocation externalFormatResolveChromaOffsetX_ = ChromaLocation::eCositedEven,
                                                                             ChromaLocation externalFormatResolveChromaOffsetY_ = ChromaLocation::eCositedEven,
                                                                             void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
    : pNext{ pNext_ }
    , nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ }
    , externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ }
    , externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ }
  {
  }

  VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolvePropertiesANDROID( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;

  // construction from the layout-compatible C struct
  PhysicalDeviceExternalFormatResolvePropertiesANDROID( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
    : PhysicalDeviceExternalFormatResolvePropertiesANDROID( *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs ) )
  {
  }

  PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
# endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

  PhysicalDeviceExternalFormatResolvePropertiesANDROID & operator=( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<PhysicalDeviceExternalFormatResolvePropertiesANDROID const *>( &rhs );
    return *this;
  }

  operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast<const VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); }

  operator VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID *>( this ); }

# if defined( VULKAN_HPP_USE_REFLECT )
  std::tuple<StructureType const &, void * const &, Bool32 const &, ChromaLocation const &, ChromaLocation const &> reflect() const VULKAN_HPP_NOEXCEPT
  {
    return std::tie( sType, pNext, nullColorAttachmentWithExternalFormatResolve, externalFormatResolveChromaOffsetX, externalFormatResolveChromaOffsetY );
  }
# endif

# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
  auto operator<=>( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & ) const = default;
# else
  bool operator==( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
#  if defined( VULKAN_HPP_USE_REFLECT )
    return this->reflect() == rhs.reflect();
#  else
    return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nullColorAttachmentWithExternalFormatResolve == rhs.nullColorAttachmentWithExternalFormatResolve ) &&
           ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY );
#  endif
  }

  bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); }
# endif

public:
  StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID;
  void * pNext = {};
  Bool32 nullColorAttachmentWithExternalFormatResolve = {};
  ChromaLocation externalFormatResolveChromaOffsetX = ChromaLocation::eCositedEven;
  ChromaLocation externalFormatResolveChromaOffsetY = ChromaLocation::eCositedEven;
};

# if 20 <= VULKAN_HPP_CPP_VERSION
template <>
struct CppType<VkPhysicalDeviceExternalFormatResolvePropertiesANDROID>
{
  using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID;
};
# endif

template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID>
{
  using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
rhs.nullColorAttachmentWithExternalFormatResolve ) && ( externalFormatResolveChromaOffsetX == rhs.externalFormatResolveChromaOffsetX ) && ( externalFormatResolveChromaOffsetY == rhs.externalFormatResolveChromaOffsetY ); # endif } bool operator!=( PhysicalDeviceExternalFormatResolvePropertiesANDROID const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID; void * pNext = {}; Bool32 nullColorAttachmentWithExternalFormatResolve = {}; ChromaLocation externalFormatResolveChromaOffsetX = ChromaLocation::eCositedEven; ChromaLocation externalFormatResolveChromaOffsetY = ChromaLocation::eCositedEven; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; }; # endif template <> struct CppType { using Type = PhysicalDeviceExternalFormatResolvePropertiesANDROID; }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ // wrapper struct for struct VkPhysicalDeviceExternalImageFormatInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalImageFormatInfo.html struct PhysicalDeviceExternalImageFormatInfo { using NativeType = VkPhysicalDeviceExternalImageFormatInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalImageFormatInfo( 
VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalImageFormatInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalImageFormatInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto 
operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; #else bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); # endif } bool operator!=( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; const void * pNext = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalImageFormatInfo; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalImageFormatInfo; }; using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; // wrapper struct for struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html struct PhysicalDeviceExternalMemoryHostPropertiesEXT { using NativeType = VkPhysicalDeviceExternalMemoryHostPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( DeviceSize minImportedHostPointerAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minImportedHostPointerAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment ); # endif } bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT; void * pNext = {}; 
DeviceSize minImportedHostPointerAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryRDMAFeaturesNV.html struct PhysicalDeviceExternalMemoryRDMAFeaturesNV { using NativeType = VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( Bool32 externalMemoryRDMA_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , externalMemoryRDMA{ externalMemoryRDMA_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryRDMAFeaturesNV( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalMemoryRDMAFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalMemoryRDMAFeaturesNV & operator=( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV & setExternalMemoryRDMA( Bool32 externalMemoryRDMA_ ) & VULKAN_HPP_NOEXCEPT { externalMemoryRDMA = externalMemoryRDMA_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryRDMAFeaturesNV && setExternalMemoryRDMA( Bool32 externalMemoryRDMA_ ) && VULKAN_HPP_NOEXCEPT { externalMemoryRDMA = externalMemoryRDMA_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryRDMAFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, externalMemoryRDMA ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( externalMemoryRDMA == rhs.externalMemoryRDMA ); # endif } bool operator!=( PhysicalDeviceExternalMemoryRDMAFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalMemoryRdmaFeaturesNV; void * pNext = {}; Bool32 externalMemoryRDMA = 
{}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalMemoryRDMAFeaturesNV; }; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) // wrapper struct for struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX.html struct PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX { using NativeType = VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( Bool32 screenBufferImport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , screenBufferImport{ screenBufferImport_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & operator=( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX & setScreenBufferImport( Bool32 screenBufferImport_ ) & VULKAN_HPP_NOEXCEPT { screenBufferImport = screenBufferImport_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX && setScreenBufferImport( Bool32 screenBufferImport_ ) && VULKAN_HPP_NOEXCEPT { screenBufferImport = screenBufferImport_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, screenBufferImport ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default; # else bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport ); # endif } bool operator!=( 
PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; void * pNext = {}; Bool32 screenBufferImport = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; }; # endif template <> struct CppType { using Type = PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; }; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ // wrapper struct for struct VkPhysicalDeviceExternalSemaphoreInfo, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html struct PhysicalDeviceExternalSemaphoreInfo { using NativeType = VkPhysicalDeviceExternalSemaphoreInfo; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( ExternalSemaphoreHandleTypeFlagBits handleType_ = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); 
return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo && setHandleType( ExternalSemaphoreHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalSemaphoreInfo const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalSemaphoreInfo const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceExternalSemaphoreInfo *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const & ) const = default; #else bool operator==( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); # endif } bool operator!=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif 
public: StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo; const void * pNext = {}; ExternalSemaphoreHandleTypeFlagBits handleType = ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalSemaphoreInfo; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalSemaphoreInfo; }; using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; // wrapper struct for struct VkPhysicalDeviceExternalTensorInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceExternalTensorInfoARM.html struct PhysicalDeviceExternalTensorInfoARM { using NativeType = VkPhysicalDeviceExternalTensorInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalTensorInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM( TensorCreateFlagsARM flags_ = {}, const TensorDescriptionARM * pDescription_ = {}, ExternalMemoryHandleTypeFlagBits handleType_ = ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pDescription{ pDescription_ } , handleType{ handleType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalTensorInfoARM( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceExternalTensorInfoARM( VkPhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExternalTensorInfoARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceExternalTensorInfoARM & operator=( PhysicalDeviceExternalTensorInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceExternalTensorInfoARM & operator=( VkPhysicalDeviceExternalTensorInfoARM const & rhs 
) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setFlags( TensorCreateFlagsARM flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM && setFlags( TensorCreateFlagsARM flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setPDescription( const TensorDescriptionARM * pDescription_ ) & VULKAN_HPP_NOEXCEPT { pDescription = pDescription_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM && setPDescription( const TensorDescriptionARM * pDescription_ ) && VULKAN_HPP_NOEXCEPT { pDescription = pDescription_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM & setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) & VULKAN_HPP_NOEXCEPT { handleType = handleType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalTensorInfoARM && setHandleType( ExternalMemoryHandleTypeFlagBits handleType_ ) && VULKAN_HPP_NOEXCEPT { handleType = handleType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceExternalTensorInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalTensorInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceExternalTensorInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkPhysicalDeviceExternalTensorInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, flags, pDescription, handleType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalTensorInfoARM const & ) const = default; #else bool operator==( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pDescription == rhs.pDescription ) && ( handleType == rhs.handleType ); # endif } bool operator!=( PhysicalDeviceExternalTensorInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceExternalTensorInfoARM; const void * pNext = {}; TensorCreateFlagsARM flags = {}; const TensorDescriptionARM * pDescription = {}; ExternalMemoryHandleTypeFlagBits handleType = ExternalMemoryHandleTypeFlagBits::eOpaqueFd; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceExternalTensorInfoARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceExternalTensorInfoARM; }; // wrapper struct for struct VkPhysicalDeviceFaultFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFaultFeaturesEXT.html struct PhysicalDeviceFaultFeaturesEXT { using NativeType = VkPhysicalDeviceFaultFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFaultFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( Bool32 deviceFault_ = {}, Bool32 deviceFaultVendorBinary_ = {}, void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceFault{ deviceFault_ } , deviceFaultVendorBinary{ deviceFaultVendorBinary_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFaultFeaturesEXT( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFaultFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFaultFeaturesEXT & operator=( PhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFaultFeaturesEXT & operator=( VkPhysicalDeviceFaultFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFault( Bool32 deviceFault_ ) & VULKAN_HPP_NOEXCEPT { deviceFault = deviceFault_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT && setDeviceFault( Bool32 deviceFault_ ) && VULKAN_HPP_NOEXCEPT { deviceFault = deviceFault_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT & setDeviceFaultVendorBinary( Bool32 deviceFaultVendorBinary_ ) & VULKAN_HPP_NOEXCEPT { deviceFaultVendorBinary = deviceFaultVendorBinary_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFaultFeaturesEXT && setDeviceFaultVendorBinary( Bool32 deviceFaultVendorBinary_ ) && VULKAN_HPP_NOEXCEPT { deviceFaultVendorBinary = deviceFaultVendorBinary_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFaultFeaturesEXT const &() 
const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFaultFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFaultFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFaultFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceFault, deviceFaultVendorBinary ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFaultFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceFault == rhs.deviceFault ) && ( deviceFaultVendorBinary == rhs.deviceFaultVendorBinary ); # endif } bool operator!=( PhysicalDeviceFaultFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFaultFeaturesEXT; void * pNext = {}; Bool32 deviceFault = {}; Bool32 deviceFaultVendorBinary = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFaultFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFaultFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceFeatures2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFeatures2.html struct PhysicalDeviceFeatures2 { using NativeType = VkPhysicalDeviceFeatures2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , features{ features_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFeatures2( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & setFeatures( PhysicalDeviceFeatures const & features_ ) & VULKAN_HPP_NOEXCEPT { features = features_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 && setFeatures( PhysicalDeviceFeatures const & features_ ) && VULKAN_HPP_NOEXCEPT { features = features_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFeatures2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFeatures2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, 
pNext, features ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFeatures2 const & ) const = default; #else bool operator==( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( features == rhs.features ); # endif } bool operator!=( PhysicalDeviceFeatures2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFeatures2; void * pNext = {}; PhysicalDeviceFeatures features = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFeatures2; }; #endif template <> struct CppType { using Type = PhysicalDeviceFeatures2; }; using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; // wrapper struct for struct VkPhysicalDeviceFloatControlsProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFloatControlsProperties.html struct PhysicalDeviceFloatControlsProperties { using NativeType = VkPhysicalDeviceFloatControlsProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( ShaderFloatControlsIndependence denormBehaviorIndependence_ = ShaderFloatControlsIndependence::e32BitOnly, ShaderFloatControlsIndependence roundingModeIndependence_ = ShaderFloatControlsIndependence::e32BitOnly, Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, Bool32 shaderDenormPreserveFloat16_ = {}, Bool32 shaderDenormPreserveFloat32_ = {}, Bool32 
shaderDenormPreserveFloat64_ = {}, Bool32 shaderDenormFlushToZeroFloat16_ = {}, Bool32 shaderDenormFlushToZeroFloat32_ = {}, Bool32 shaderDenormFlushToZeroFloat64_ = {}, Bool32 shaderRoundingModeRTEFloat16_ = {}, Bool32 shaderRoundingModeRTEFloat32_ = {}, Bool32 shaderRoundingModeRTEFloat64_ = {}, Bool32 shaderRoundingModeRTZFloat16_ = {}, Bool32 shaderRoundingModeRTZFloat32_ = {}, Bool32 shaderRoundingModeRTZFloat64_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , denormBehaviorIndependence{ denormBehaviorIndependence_ } , roundingModeIndependence{ roundingModeIndependence_ } , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFloatControlsProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFloatControlsProperties 
// NOTE(review): this region of the generated Vulkan-Hpp header has been text-mangled: physical lines are
// collapsed together and template-argument lists have been stripped (e.g. "reinterpret_cast( &rhs )",
// "CppType { ... }", "std::tuple reflect()"), so this text cannot compile as-is. Do not hand-edit;
// regenerate the file from the Vulkan XML registry (vk.xml) with the Vulkan-Hpp generator.
// Above: tail of the PhysicalDeviceFloatControlsProperties value constructor (19 float-controls fields),
// the defaulted copy constructor, and the converting constructor from the C struct VkPhysicalDeviceFloatControlsProperties.
& operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFloatControlsProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFloatControlsProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFloatControlsProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, denormBehaviorIndependence, roundingModeIndependence, shaderSignedZeroInfNanPreserveFloat16, shaderSignedZeroInfNanPreserveFloat32, shaderSignedZeroInfNanPreserveFloat64, shaderDenormPreserveFloat16, shaderDenormPreserveFloat32, shaderDenormPreserveFloat64, shaderDenormFlushToZeroFloat16, shaderDenormFlushToZeroFloat32, shaderDenormFlushToZeroFloat64, shaderRoundingModeRTEFloat16, shaderRoundingModeRTEFloat32, shaderRoundingModeRTEFloat64, shaderRoundingModeRTZFloat16, shaderRoundingModeRTZFloat32, shaderRoundingModeRTZFloat64 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFloatControlsProperties const & ) const = default; #else bool operator==( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence ) && ( roundingModeIndependence == rhs.roundingModeIndependence ) && ( shaderSignedZeroInfNanPreserveFloat16 == 
// Memberwise operator== over all 19 float-controls limits — used only when the defaulted
// operator<=> is unavailable (no VULKAN_HPP_HAS_SPACESHIP_OPERATOR).
rhs.shaderSignedZeroInfNanPreserveFloat16 ) && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 ) && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 ) && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 ) && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 ) && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 ) && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 ) && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 ) && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 ) && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 ) && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 ) && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 ) && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 ) && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 ) && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 ); # endif } bool operator!=( PhysicalDeviceFloatControlsProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties; void * pNext = {}; ShaderFloatControlsIndependence denormBehaviorIndependence = ShaderFloatControlsIndependence::e32BitOnly; ShaderFloatControlsIndependence roundingModeIndependence = ShaderFloatControlsIndependence::e32BitOnly; Bool32 shaderSignedZeroInfNanPreserveFloat16 = {}; Bool32 shaderSignedZeroInfNanPreserveFloat32 = {}; Bool32 shaderSignedZeroInfNanPreserveFloat64 = {}; Bool32 shaderDenormPreserveFloat16 = {}; Bool32 shaderDenormPreserveFloat32 = {}; Bool32 shaderDenormPreserveFloat64 = {}; Bool32 shaderDenormFlushToZeroFloat16 = {}; Bool32 shaderDenormFlushToZeroFloat32 = {}; Bool32 shaderDenormFlushToZeroFloat64 = {}; Bool32 shaderRoundingModeRTEFloat16 = 
// Data members: sType is fixed to ePhysicalDeviceFloatControlsProperties; the two independence
// enums default to ShaderFloatControlsIndependence::e32BitOnly; all Bool32 limits default to {}.
// The KHR alias below preserves the original extension-era name. Then the next wrapper,
// PhysicalDeviceFormatPackFeaturesARM, begins (sType ePhysicalDeviceFormatPackFeaturesARM).
{}; Bool32 shaderRoundingModeRTEFloat32 = {}; Bool32 shaderRoundingModeRTEFloat64 = {}; Bool32 shaderRoundingModeRTZFloat16 = {}; Bool32 shaderRoundingModeRTZFloat32 = {}; Bool32 shaderRoundingModeRTZFloat64 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFloatControlsProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceFloatControlsProperties; }; using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties; // wrapper struct for struct VkPhysicalDeviceFormatPackFeaturesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFormatPackFeaturesARM.html struct PhysicalDeviceFormatPackFeaturesARM { using NativeType = VkPhysicalDeviceFormatPackFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFormatPackFeaturesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM( Bool32 formatPack_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , formatPack{ formatPack_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFormatPackFeaturesARM( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFormatPackFeaturesARM( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFormatPackFeaturesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFormatPackFeaturesARM & operator=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFormatPackFeaturesARM & operator=( VkPhysicalDeviceFormatPackFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
// PhysicalDeviceFormatPackFeaturesARM fluent setters: each field has an lvalue (&) overload
// returning *this and an rvalue (&&) overload returning std::move( *this ) for chaining on temporaries.
// NOTE(review): template arguments in reinterpret_cast/std::tuple/CppType below were lost to text
// mangling of this generated file — regenerate rather than hand-repair.
PhysicalDeviceFormatPackFeaturesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM & setFormatPack( Bool32 formatPack_ ) & VULKAN_HPP_NOEXCEPT { formatPack = formatPack_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFormatPackFeaturesARM && setFormatPack( Bool32 formatPack_ ) && VULKAN_HPP_NOEXCEPT { formatPack = formatPack_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFormatPackFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFormatPackFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFormatPackFeaturesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFormatPackFeaturesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, formatPack ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFormatPackFeaturesARM const & ) const = default; #else bool operator==( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatPack == rhs.formatPack ); # endif } bool operator!=( PhysicalDeviceFormatPackFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFormatPackFeaturesARM; void * pNext = {}; Bool32 formatPack = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { 
// End of PhysicalDeviceFormatPackFeaturesARM (members: sType, pNext, Bool32 formatPack) and its
// CppType specializations; then the wrapper for VkPhysicalDeviceFragmentDensityMap2FeaturesEXT
// begins (single Bool32 feature: fragmentDensityMapDeferred).
using Type = PhysicalDeviceFormatPackFeaturesARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceFormatPackFeaturesARM; }; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2FeaturesEXT.html struct PhysicalDeviceFragmentDensityMap2FeaturesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( Bool32 fragmentDensityMapDeferred_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT; void * pNext = {}; Bool32 fragmentDensityMapDeferred = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMap2PropertiesEXT.html struct PhysicalDeviceFragmentDensityMap2PropertiesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( Bool32 subsampledLoads_ = {}, Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , subsampledLoads{ subsampledLoads_ } , subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ } , maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ } , maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, subsampledLoads, subsampledCoarseReconstructionEarlyAccess, maxSubsampledArrayLayers, maxDescriptorSetSubsampledSamplers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( subsampledLoads == rhs.subsampledLoads ) && ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess ) && ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers ) && ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT; void * pNext = {}; Bool32 
subsampledLoads = {}; Bool32 subsampledCoarseReconstructionEarlyAccess = {}; uint32_t maxSubsampledArrayLayers = {}; uint32_t maxDescriptorSetSubsampledSamplers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html struct PhysicalDeviceFragmentDensityMapFeaturesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( Bool32 fragmentDensityMap_ = {}, Bool32 fragmentDensityMapDynamic_ = {}, Bool32 fragmentDensityMapNonSubsampledImages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentDensityMap{ fragmentDensityMap_ } , fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ } , fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapFeaturesEXT 
// PhysicalDeviceFragmentDensityMapFeaturesEXT: assignment from the C struct, then paired
// lvalue/rvalue setters for each of its three Bool32 features (fragmentDensityMap,
// fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages).
& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( Bool32 fragmentDensityMap_ ) & VULKAN_HPP_NOEXCEPT { fragmentDensityMap = fragmentDensityMap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT && setFragmentDensityMap( Bool32 fragmentDensityMap_ ) && VULKAN_HPP_NOEXCEPT { fragmentDensityMap = fragmentDensityMap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( Bool32 fragmentDensityMapDynamic_ ) & VULKAN_HPP_NOEXCEPT { fragmentDensityMapDynamic = fragmentDensityMapDynamic_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT && setFragmentDensityMapDynamic( Bool32 fragmentDensityMapDynamic_ ) && VULKAN_HPP_NOEXCEPT { fragmentDensityMapDynamic = fragmentDensityMapDynamic_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( Bool32 fragmentDensityMapNonSubsampledImages_ ) & VULKAN_HPP_NOEXCEPT { fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT && setFragmentDensityMapNonSubsampledImages( Bool32 fragmentDensityMapNonSubsampledImages_ ) && VULKAN_HPP_NOEXCEPT { fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; 
// Conversions to/from the C struct, reflect(), and the three-field comparison close out the struct.
return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentDensityMap, fragmentDensityMapDynamic, fragmentDensityMapNonSubsampledImages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMap == rhs.fragmentDensityMap ) && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic ) && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT; void * pNext = {}; Bool32 fragmentDensityMap = {}; Bool32 fragmentDensityMapDynamic = {}; Bool32 fragmentDensityMapNonSubsampledImages = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT; }; // wrapper struct for struct 
// NOTE(review): the wrapper comment + registry URL for the next struct was split mid-comment by the
// line mangling — the URL below lands outside any comment. Regenerate the file to repair.
VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE.html struct PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE { using NativeType = VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( Bool32 fragmentDensityMapLayered_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentDensityMapLayered{ fragmentDensityMapLayered_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & operator=( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & operator=( VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE && setPNext( void * pNext_ ) && 
// Remainder of PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE: setters for its single Bool32
// feature fragmentDensityMapLayered, C-struct conversions, reflect(), and comparisons.
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE & setFragmentDensityMapLayered( Bool32 fragmentDensityMapLayered_ ) & VULKAN_HPP_NOEXCEPT { fragmentDensityMapLayered = fragmentDensityMapLayered_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE && setFragmentDensityMapLayered( Bool32 fragmentDensityMapLayered_ ) && VULKAN_HPP_NOEXCEPT { fragmentDensityMapLayered = fragmentDensityMapLayered_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentDensityMapLayered ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapLayered == rhs.fragmentDensityMapLayered ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
// Then PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE: a single uint32_t limit
// (maxFragmentDensityMapLayers); like the other properties structs here it has no setter block.
StructureType::ePhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE; void * pNext = {}; Bool32 fragmentDensityMapLayered = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapLayeredFeaturesVALVE; }; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE.html struct PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE { using NativeType = VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( uint32_t maxFragmentDensityMapLayers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxFragmentDensityMapLayers{ maxFragmentDensityMapLayers_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE & operator=( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE & operator=( VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const 
// Conversions, reflect(), and the maxFragmentDensityMapLayers comparison finish the struct below.
& rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxFragmentDensityMapLayers ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentDensityMapLayers == rhs.maxFragmentDensityMapLayers ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE; void * pNext = {}; uint32_t maxFragmentDensityMapLayers = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapLayeredPropertiesVALVE; }; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT, see // 
// PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT: single Bool32 feature fragmentDensityMapOffset.
// The QCOM aliases further below map the former QCOM extension names onto these EXT wrappers.
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT.html struct PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( Bool32 fragmentDensityMapOffset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentDensityMapOffset{ fragmentDensityMapOffset_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
// Lvalue/rvalue setFragmentDensityMapOffset setters, conversions, reflect(), and comparisons.
PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT & setFragmentDensityMapOffset( Bool32 fragmentDensityMapOffset_ ) & VULKAN_HPP_NOEXCEPT { fragmentDensityMapOffset = fragmentDensityMapOffset_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT && setFragmentDensityMapOffset( Bool32 fragmentDensityMapOffset_ ) && VULKAN_HPP_NOEXCEPT { fragmentDensityMapOffset = fragmentDensityMapOffset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentDensityMapOffset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; void * pNext = {}; Bool32 fragmentDensityMapOffset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType 
// PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT follows: one Extent2D member,
// fragmentDensityOffsetGranularity; as a properties struct it generates no setter block.
{ using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; }; using PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = PhysicalDeviceFragmentDensityMapOffsetFeaturesEXT; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT.html struct PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( Extent2D fragmentDensityOffsetGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator 
VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapOffsetPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentDensityOffsetGranularity ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityOffsetGranularity == rhs.fragmentDensityOffsetGranularity ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; void * pNext = {}; Extent2D fragmentDensityOffsetGranularity = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; }; using PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = PhysicalDeviceFragmentDensityMapOffsetPropertiesEXT; // wrapper struct for struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html struct PhysicalDeviceFragmentDensityMapPropertiesEXT { using NativeType = VkPhysicalDeviceFragmentDensityMapPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( Extent2D minFragmentDensityTexelSize_ = {}, Extent2D maxFragmentDensityTexelSize_ = {}, Bool32 fragmentDensityInvocations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ } , maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ } , fragmentDensityInvocations{ fragmentDensityInvocations_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceFragmentDensityMapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minFragmentDensityTexelSize, maxFragmentDensityTexelSize, fragmentDensityInvocations ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize ) && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); # endif } bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT; void * pNext = {}; Extent2D minFragmentDensityTexelSize = {}; Extent2D maxFragmentDensityTexelSize = {}; Bool32 fragmentDensityInvocations = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR.html struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR { using NativeType = 
VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( Bool32 fragmentShaderBarycentric_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setFragmentShaderBarycentric( Bool32 fragmentShaderBarycentric_ ) & VULKAN_HPP_NOEXCEPT { fragmentShaderBarycentric = fragmentShaderBarycentric_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR && setFragmentShaderBarycentric( Bool32 fragmentShaderBarycentric_ ) && VULKAN_HPP_NOEXCEPT { fragmentShaderBarycentric = fragmentShaderBarycentric_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentShaderBarycentric ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); # endif } bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR; void * pNext = {}; Bool32 fragmentShaderBarycentric = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; }; 
using PhysicalDeviceFragmentShaderBarycentricFeaturesNV = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR; // wrapper struct for struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.html struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR { using NativeType = VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShaderBarycentricPropertiesKHR & operator=( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, triStripVertexOrderIndependentOfProvokingVertex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( triStripVertexOrderIndependentOfProvokingVertex == rhs.triStripVertexOrderIndependentOfProvokingVertex ); # endif } bool operator!=( PhysicalDeviceFragmentShaderBarycentricPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR; void * pNext = {}; Bool32 triStripVertexOrderIndependentOfProvokingVertex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.html struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT { using NativeType = VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; static const 
bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( Bool32 fragmentShaderSampleInterlock_ = {}, Bool32 fragmentShaderPixelInterlock_ = {}, Bool32 fragmentShaderShadingRateInterlock_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ } , fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ } , fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & 
setFragmentShaderSampleInterlock( Bool32 fragmentShaderSampleInterlock_ ) & VULKAN_HPP_NOEXCEPT { fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT && setFragmentShaderSampleInterlock( Bool32 fragmentShaderSampleInterlock_ ) && VULKAN_HPP_NOEXCEPT { fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( Bool32 fragmentShaderPixelInterlock_ ) & VULKAN_HPP_NOEXCEPT { fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT && setFragmentShaderPixelInterlock( Bool32 fragmentShaderPixelInterlock_ ) && VULKAN_HPP_NOEXCEPT { fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( Bool32 fragmentShaderShadingRateInterlock_ ) & VULKAN_HPP_NOEXCEPT { fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT && setFragmentShaderShadingRateInterlock( Bool32 fragmentShaderShadingRateInterlock_ ) && VULKAN_HPP_NOEXCEPT { fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentShaderSampleInterlock, fragmentShaderPixelInterlock, fragmentShaderShadingRateInterlock ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock ) && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock ) && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock ); # endif } bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT; void * pNext = {}; Bool32 fragmentShaderSampleInterlock = {}; Bool32 fragmentShaderPixelInterlock = {}; Bool32 fragmentShaderShadingRateInterlock = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.html struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV { using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( Bool32 fragmentShadingRateEnums_ = {}, Bool32 supersampleFragmentShadingRates_ = {}, Bool32 noInvocationFragmentShadingRates_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , fragmentShadingRateEnums{ fragmentShadingRateEnums_ } , supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ } , noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums( Bool32 
fragmentShadingRateEnums_ ) & VULKAN_HPP_NOEXCEPT { fragmentShadingRateEnums = fragmentShadingRateEnums_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV && setFragmentShadingRateEnums( Bool32 fragmentShadingRateEnums_ ) && VULKAN_HPP_NOEXCEPT { fragmentShadingRateEnums = fragmentShadingRateEnums_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( Bool32 supersampleFragmentShadingRates_ ) & VULKAN_HPP_NOEXCEPT { supersampleFragmentShadingRates = supersampleFragmentShadingRates_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV && setSupersampleFragmentShadingRates( Bool32 supersampleFragmentShadingRates_ ) && VULKAN_HPP_NOEXCEPT { supersampleFragmentShadingRates = supersampleFragmentShadingRates_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( Bool32 noInvocationFragmentShadingRates_ ) & VULKAN_HPP_NOEXCEPT { noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV && setNoInvocationFragmentShadingRates( Bool32 noInvocationFragmentShadingRates_ ) && VULKAN_HPP_NOEXCEPT { noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if 
defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, fragmentShadingRateEnums, supersampleFragmentShadingRates, noInvocationFragmentShadingRates ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums ) && ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates ) && ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates ); # endif } bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV; void * pNext = {}; Bool32 fragmentShadingRateEnums = {}; Bool32 supersampleFragmentShadingRates = {}; Bool32 noInvocationFragmentShadingRates = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.html struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV { using NativeType = VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; #if 
!defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = SampleCountFlagBits::e1, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxFragmentShadingRateInvocationCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV 
const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount ); # endif } bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV; void * pNext = {}; SampleCountFlagBits maxFragmentShadingRateInvocationCount = SampleCountFlagBits::e1; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateFeaturesKHR.html struct PhysicalDeviceFragmentShadingRateFeaturesKHR { using NativeType = VkPhysicalDeviceFragmentShadingRateFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( Bool32 pipelineFragmentShadingRate_ = {}, Bool32 primitiveFragmentShadingRate_ = {}, Bool32 attachmentFragmentShadingRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ } , primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ } , attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ 
} { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( Bool32 pipelineFragmentShadingRate_ ) & VULKAN_HPP_NOEXCEPT { pipelineFragmentShadingRate = pipelineFragmentShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR && setPipelineFragmentShadingRate( Bool32 pipelineFragmentShadingRate_ ) && VULKAN_HPP_NOEXCEPT { pipelineFragmentShadingRate = pipelineFragmentShadingRate_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( Bool32 primitiveFragmentShadingRate_ ) & VULKAN_HPP_NOEXCEPT { primitiveFragmentShadingRate = primitiveFragmentShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR && setPrimitiveFragmentShadingRate( 
Bool32 primitiveFragmentShadingRate_ ) && VULKAN_HPP_NOEXCEPT { primitiveFragmentShadingRate = primitiveFragmentShadingRate_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( Bool32 attachmentFragmentShadingRate_ ) & VULKAN_HPP_NOEXCEPT { attachmentFragmentShadingRate = attachmentFragmentShadingRate_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR && setAttachmentFragmentShadingRate( Bool32 attachmentFragmentShadingRate_ ) && VULKAN_HPP_NOEXCEPT { attachmentFragmentShadingRate = attachmentFragmentShadingRate_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineFragmentShadingRate, primitiveFragmentShadingRate, attachmentFragmentShadingRate ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate ) && ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate ) && ( attachmentFragmentShadingRate == 
rhs.attachmentFragmentShadingRate ); # endif } bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR; void * pNext = {}; Bool32 pipelineFragmentShadingRate = {}; Bool32 primitiveFragmentShadingRate = {}; Bool32 attachmentFragmentShadingRate = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceFragmentShadingRateKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRateKHR.html struct PhysicalDeviceFragmentShadingRateKHR { using NativeType = VkPhysicalDeviceFragmentShadingRateKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( SampleCountFlags sampleCounts_ = {}, Extent2D fragmentSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , sampleCounts{ sampleCounts_ } , fragmentSize{ fragmentSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRateKHR & operator=( 
VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentShadingRateKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRateKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, sampleCounts, fragmentSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( sampleCounts == rhs.sampleCounts ) && ( fragmentSize == rhs.fragmentSize ); # endif } bool operator!=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR; void * pNext = {}; SampleCountFlags sampleCounts = {}; Extent2D fragmentSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRateKHR; }; // wrapper struct for struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFragmentShadingRatePropertiesKHR.html struct PhysicalDeviceFragmentShadingRatePropertiesKHR { using NativeType = 
VkPhysicalDeviceFragmentShadingRatePropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, Bool32 layeredShadingRateAttachments_ = {}, Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = SampleCountFlagBits::e1, Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, Bool32 fragmentShadingRateWithSampleMask_ = {}, Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ } , maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ } , maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ } , primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ } , layeredShadingRateAttachments{ layeredShadingRateAttachments_ } , fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ } , maxFragmentSize{ 
maxFragmentSize_ } , maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ } , maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ } , maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ } , fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ } , fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ } , fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ } , fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ } , fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ } , fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ } , fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const *() const 
VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSize, maxFragmentShadingRateAttachmentTexelSizeAspectRatio, primitiveFragmentShadingRateWithMultipleViewports, layeredShadingRateAttachments, fragmentShadingRateNonTrivialCombinerOps, maxFragmentSize, maxFragmentSizeAspectRatio, maxFragmentShadingRateCoverageSamples, maxFragmentShadingRateRasterizationSamples, fragmentShadingRateWithShaderDepthStencilWrites, fragmentShadingRateWithSampleMask, fragmentShadingRateWithShaderSampleMask, fragmentShadingRateWithConservativeRasterization, fragmentShadingRateWithFragmentShaderInterlock, fragmentShadingRateWithCustomSampleLocations, fragmentShadingRateStrictMultiplyCombiner ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize ) && ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize ) && ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio ) && ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports ) && ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments ) && ( fragmentShadingRateNonTrivialCombinerOps == 
rhs.fragmentShadingRateNonTrivialCombinerOps ) && ( maxFragmentSize == rhs.maxFragmentSize ) && ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio ) && ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples ) && ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples ) && ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites ) && ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask ) && ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask ) && ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization ) && ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock ) && ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations ) && ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner ); # endif } bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR; void * pNext = {}; Extent2D minFragmentShadingRateAttachmentTexelSize = {}; Extent2D maxFragmentShadingRateAttachmentTexelSize = {}; uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {}; Bool32 primitiveFragmentShadingRateWithMultipleViewports = {}; Bool32 layeredShadingRateAttachments = {}; Bool32 fragmentShadingRateNonTrivialCombinerOps = {}; Extent2D maxFragmentSize = {}; uint32_t maxFragmentSizeAspectRatio = {}; uint32_t maxFragmentShadingRateCoverageSamples = {}; SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = SampleCountFlagBits::e1; Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {}; Bool32 fragmentShadingRateWithSampleMask = {}; Bool32 
fragmentShadingRateWithShaderSampleMask = {}; Bool32 fragmentShadingRateWithConservativeRasterization = {}; Bool32 fragmentShadingRateWithFragmentShaderInterlock = {}; Bool32 fragmentShadingRateWithCustomSampleLocations = {}; Bool32 fragmentShadingRateStrictMultiplyCombiner = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceFrameBoundaryFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceFrameBoundaryFeaturesEXT.html struct PhysicalDeviceFrameBoundaryFeaturesEXT { using NativeType = VkPhysicalDeviceFrameBoundaryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , frameBoundary{ frameBoundary_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceFrameBoundaryFeaturesEXT( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceFrameBoundaryFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceFrameBoundaryFeaturesEXT & operator=( VkPhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT & setFrameBoundary( Bool32 frameBoundary_ ) & VULKAN_HPP_NOEXCEPT { frameBoundary = frameBoundary_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFrameBoundaryFeaturesEXT && setFrameBoundary( Bool32 frameBoundary_ ) && VULKAN_HPP_NOEXCEPT { frameBoundary = frameBoundary_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFrameBoundaryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceFrameBoundaryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceFrameBoundaryFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, frameBoundary ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceFrameBoundaryFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( frameBoundary == rhs.frameBoundary ); # endif } bool operator!=( PhysicalDeviceFrameBoundaryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceFrameBoundaryFeaturesEXT; void * pNext = {}; Bool32 frameBoundary = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceGlobalPriorityQueryFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGlobalPriorityQueryFeatures.html struct PhysicalDeviceGlobalPriorityQueryFeatures { using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( Bool32 globalPriorityQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , globalPriorityQuery{ globalPriorityQuery_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setGlobalPriorityQuery( Bool32 globalPriorityQuery_ ) & VULKAN_HPP_NOEXCEPT { globalPriorityQuery = globalPriorityQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures && setGlobalPriorityQuery( Bool32 globalPriorityQuery_ ) && VULKAN_HPP_NOEXCEPT { globalPriorityQuery = globalPriorityQuery_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGlobalPriorityQueryFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceGlobalPriorityQueryFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, globalPriorityQuery ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default; #else bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ); # endif } bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; void * 
pNext = {}; Bool32 globalPriorityQuery = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceGlobalPriorityQueryFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceGlobalPriorityQueryFeatures; }; using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT.html struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( Bool32 graphicsPipelineLibrary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , graphicsPipelineLibrary{ graphicsPipelineLibrary_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT & setGraphicsPipelineLibrary( Bool32 graphicsPipelineLibrary_ ) & VULKAN_HPP_NOEXCEPT { graphicsPipelineLibrary = graphicsPipelineLibrary_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT && setGraphicsPipelineLibrary( Bool32 graphicsPipelineLibrary_ ) && VULKAN_HPP_NOEXCEPT { graphicsPipelineLibrary = graphicsPipelineLibrary_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, graphicsPipelineLibrary ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else 
return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibrary == rhs.graphicsPipelineLibrary ); # endif } bool operator!=( PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; void * pNext = {}; Bool32 graphicsPipelineLibrary = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT.html struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { using NativeType = VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( Bool32 graphicsPipelineLibraryFastLinking_ = {}, Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ } , graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT & operator=( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, graphicsPipelineLibraryFastLinking, graphicsPipelineLibraryIndependentInterpolationDecoration ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( graphicsPipelineLibraryFastLinking == rhs.graphicsPipelineLibraryFastLinking ) && ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration ); # endif } bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; void * pNext = {}; Bool32 graphicsPipelineLibraryFastLinking = {}; Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceGroupProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceGroupProperties.html struct PhysicalDeviceGroupProperties { using NativeType = VkPhysicalDeviceGroupProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {}, std::array const & physicalDevices_ = {}, Bool32 subsetAllocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , physicalDeviceCount{ physicalDeviceCount_ } , physicalDevices{ physicalDevices_ } , subsetAllocation{ subsetAllocation_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceGroupProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } 
operator VkPhysicalDeviceGroupProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceGroupProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceGroupProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, physicalDeviceCount, physicalDevices, subsetAllocation ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) return cmp; if ( auto cmp = physicalDeviceCount <=> rhs.physicalDeviceCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < physicalDeviceCount; ++i ) { if ( auto cmp = physicalDevices[i] <=> rhs.physicalDevices[i]; cmp != 0 ) return cmp; } if ( auto cmp = subsetAllocation <=> rhs.subsetAllocation; cmp != 0 ) return cmp; return std::strong_ordering::equivalent; } #endif bool operator==( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( physicalDeviceCount == rhs.physicalDeviceCount ) && ( memcmp( physicalDevices, rhs.physicalDevices, physicalDeviceCount * sizeof( PhysicalDevice ) ) == 0 ) && ( subsetAllocation == rhs.subsetAllocation ); } bool operator!=( PhysicalDeviceGroupProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: StructureType sType = StructureType::ePhysicalDeviceGroupProperties; void * pNext = {}; uint32_t physicalDeviceCount = {}; ArrayWrapper1D physicalDevices = {}; Bool32 subsetAllocation = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template 
<> struct CppType { using Type = PhysicalDeviceGroupProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceGroupProperties; }; using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; // wrapper struct for struct VkPhysicalDeviceHdrVividFeaturesHUAWEI, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHdrVividFeaturesHUAWEI.html struct PhysicalDeviceHdrVividFeaturesHUAWEI { using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hdrVivid{ hdrVivid_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( Bool32 hdrVivid_ ) & VULKAN_HPP_NOEXCEPT { hdrVivid = hdrVivid_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI && setHdrVivid( Bool32 hdrVivid_ ) && VULKAN_HPP_NOEXCEPT { hdrVivid = hdrVivid_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceHdrVividFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, hdrVivid ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default; #else bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdrVivid == rhs.hdrVivid ); # endif } bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; void * pNext = {}; Bool32 hdrVivid = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; }; #endif template <> struct CppType { using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; }; // wrapper struct for struct VkPhysicalDeviceHostImageCopyFeatures, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyFeatures.html struct PhysicalDeviceHostImageCopyFeatures { using NativeType = VkPhysicalDeviceHostImageCopyFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hostImageCopy{ hostImageCopy_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceHostImageCopyFeatures & operator=( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( Bool32 hostImageCopy_ ) & VULKAN_HPP_NOEXCEPT { hostImageCopy = hostImageCopy_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures && setHostImageCopy( Bool32 hostImageCopy_ ) && VULKAN_HPP_NOEXCEPT { 
hostImageCopy = hostImageCopy_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, hostImageCopy ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default; #else bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostImageCopy == rhs.hostImageCopy ); # endif } bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeatures; void * pNext = {}; Bool32 hostImageCopy = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceHostImageCopyFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceHostImageCopyFeatures; }; using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; // wrapper struct for struct VkPhysicalDeviceHostImageCopyProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostImageCopyProperties.html struct PhysicalDeviceHostImageCopyProperties { using NativeType = VkPhysicalDeviceHostImageCopyProperties; static const bool allowDuplicate = 
false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( uint32_t copySrcLayoutCount_ = {}, ImageLayout * pCopySrcLayouts_ = {}, uint32_t copyDstLayoutCount_ = {}, ImageLayout * pCopyDstLayouts_ = {}, std::array const & optimalTilingLayoutUUID_ = {}, Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , copySrcLayoutCount{ copySrcLayoutCount_ } , pCopySrcLayouts{ pCopySrcLayouts_ } , copyDstLayoutCount{ copyDstLayoutCount_ } , pCopyDstLayouts{ pCopyDstLayouts_ } , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceHostImageCopyProperties( ArrayProxyNoTemporaries const & copySrcLayouts_, ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, std::array const & optimalTilingLayoutUUID_ = {}, Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr ) : pNext( pNext_ ) , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) , pCopySrcLayouts( copySrcLayouts_.data() ) , copyDstLayoutCount( static_cast( copyDstLayouts_.size() ) ) , pCopyDstLayouts( copyDstLayouts_.data() ) , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PhysicalDeviceHostImageCopyProperties & operator=( 
PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) & VULKAN_HPP_NOEXCEPT { copySrcLayoutCount = copySrcLayoutCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) && VULKAN_HPP_NOEXCEPT { copySrcLayoutCount = copySrcLayoutCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopySrcLayouts( ImageLayout * pCopySrcLayouts_ ) & VULKAN_HPP_NOEXCEPT { pCopySrcLayouts = pCopySrcLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setPCopySrcLayouts( ImageLayout * pCopySrcLayouts_ ) && VULKAN_HPP_NOEXCEPT { pCopySrcLayouts = pCopySrcLayouts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceHostImageCopyProperties & setCopySrcLayouts( ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT { copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); pCopySrcLayouts = copySrcLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) & VULKAN_HPP_NOEXCEPT { 
copyDstLayoutCount = copyDstLayoutCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) && VULKAN_HPP_NOEXCEPT { copyDstLayoutCount = copyDstLayoutCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopyDstLayouts( ImageLayout * pCopyDstLayouts_ ) & VULKAN_HPP_NOEXCEPT { pCopyDstLayouts = pCopyDstLayouts_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setPCopyDstLayouts( ImageLayout * pCopyDstLayouts_ ) && VULKAN_HPP_NOEXCEPT { pCopyDstLayouts = pCopyDstLayouts_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceHostImageCopyProperties & setCopyDstLayouts( ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT { copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); pCopyDstLayouts = copyDstLayouts_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) & VULKAN_HPP_NOEXCEPT { optimalTilingLayoutUUID = optimalTilingLayoutUUID_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) && VULKAN_HPP_NOEXCEPT { optimalTilingLayoutUUID = optimalTilingLayoutUUID_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setIdenticalMemoryTypeRequirements( Bool32 identicalMemoryTypeRequirements_ ) & VULKAN_HPP_NOEXCEPT { identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties && setIdenticalMemoryTypeRequirements( Bool32 identicalMemoryTypeRequirements_ ) && VULKAN_HPP_NOEXCEPT { identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; return std::move( *this ); } #endif 
/*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceHostImageCopyProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, copySrcLayoutCount, pCopySrcLayouts, copyDstLayoutCount, pCopyDstLayouts, optimalTilingLayoutUUID, identicalMemoryTypeRequirements ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default; #else bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); # endif } bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceHostImageCopyProperties; void * pNext = {}; uint32_t copySrcLayoutCount = {}; ImageLayout * pCopySrcLayouts = {}; uint32_t copyDstLayoutCount = {}; ImageLayout * pCopyDstLayouts = {}; ArrayWrapper1D optimalTilingLayoutUUID = {}; Bool32 identicalMemoryTypeRequirements = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceHostImageCopyProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceHostImageCopyProperties; }; using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; // wrapper struct for struct VkPhysicalDeviceHostQueryResetFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceHostQueryResetFeatures.html struct PhysicalDeviceHostQueryResetFeatures { using NativeType = VkPhysicalDeviceHostQueryResetFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hostQueryReset{ hostQueryReset_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures && setPNext( void * 
pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( Bool32 hostQueryReset_ ) & VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures && setHostQueryReset( Bool32 hostQueryReset_ ) && VULKAN_HPP_NOEXCEPT { hostQueryReset = hostQueryReset_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceHostQueryResetFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceHostQueryResetFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceHostQueryResetFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, hostQueryReset ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceHostQueryResetFeatures const & ) const = default; #else bool operator==( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hostQueryReset == rhs.hostQueryReset ); # endif } bool operator!=( PhysicalDeviceHostQueryResetFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures; void * pNext = {}; Bool32 hostQueryReset = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceHostQueryResetFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceHostQueryResetFeatures; }; 
using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures; // wrapper struct for struct VkPhysicalDeviceIDProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIDProperties.html struct PhysicalDeviceIDProperties { using NativeType = VkPhysicalDeviceIDProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array const & deviceUUID_ = {}, std::array const & driverUUID_ = {}, std::array const & deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, Bool32 deviceLUIDValid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , deviceUUID{ deviceUUID_ } , driverUUID{ driverUUID_ } , deviceLUID{ deviceLUID_ } , deviceNodeMask{ deviceNodeMask_ } , deviceLUIDValid{ deviceLUIDValid_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceIDProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceIDProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceIDProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceIDProperties *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &, ArrayWrapper1D const &, uint32_t const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, deviceUUID, driverUUID, deviceLUID, deviceNodeMask, deviceLUIDValid ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceIDProperties const & ) const = default; #else bool operator==( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceUUID == rhs.deviceUUID ) && ( driverUUID == rhs.driverUUID ) && ( deviceLUID == rhs.deviceLUID ) && ( deviceNodeMask == rhs.deviceNodeMask ) && ( deviceLUIDValid == rhs.deviceLUIDValid ); # endif } bool operator!=( PhysicalDeviceIDProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceIdProperties; void * pNext = {}; ArrayWrapper1D deviceUUID = {}; ArrayWrapper1D driverUUID = {}; ArrayWrapper1D deviceLUID = {}; uint32_t deviceNodeMask = {}; Bool32 deviceLUIDValid = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceIDProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceIDProperties; }; using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; // wrapper struct for struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImage2DViewOf3DFeaturesEXT.html struct PhysicalDeviceImage2DViewOf3DFeaturesEXT { using NativeType = VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( Bool32 image2DViewOf3D_ = {}, Bool32 sampler2DViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image2DViewOf3D{ image2DViewOf3D_ } , sampler2DViewOf3D{ sampler2DViewOf3D_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImage2DViewOf3DFeaturesEXT( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImage2DViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImage2DViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setImage2DViewOf3D( Bool32 image2DViewOf3D_ ) & VULKAN_HPP_NOEXCEPT { image2DViewOf3D = image2DViewOf3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT && setImage2DViewOf3D( Bool32 image2DViewOf3D_ ) && VULKAN_HPP_NOEXCEPT { image2DViewOf3D = image2DViewOf3D_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT & setSampler2DViewOf3D( Bool32 sampler2DViewOf3D_ ) & VULKAN_HPP_NOEXCEPT { 
sampler2DViewOf3D = sampler2DViewOf3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImage2DViewOf3DFeaturesEXT && setSampler2DViewOf3D( Bool32 sampler2DViewOf3D_ ) && VULKAN_HPP_NOEXCEPT { sampler2DViewOf3D = sampler2DViewOf3D_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImage2DViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image2DViewOf3D, sampler2DViewOf3D ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3D == rhs.image2DViewOf3D ) && ( sampler2DViewOf3D == rhs.sampler2DViewOf3D ); # endif } bool operator!=( PhysicalDeviceImage2DViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImage2DViewOf3DFeaturesEXT; void * pNext = {}; Bool32 image2DViewOf3D = {}; Bool32 sampler2DViewOf3D = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; }; // wrapper struct for struct 
VkPhysicalDeviceImageAlignmentControlFeaturesMESA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlFeaturesMESA.html struct PhysicalDeviceImageAlignmentControlFeaturesMESA { using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( Bool32 imageAlignmentControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageAlignmentControl{ imageAlignmentControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageAlignmentControlFeaturesMESA & setImageAlignmentControl( Bool32 imageAlignmentControl_ ) & VULKAN_HPP_NOEXCEPT { imageAlignmentControl = imageAlignmentControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA && setImageAlignmentControl( Bool32 imageAlignmentControl_ ) && VULKAN_HPP_NOEXCEPT { imageAlignmentControl = imageAlignmentControl_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageAlignmentControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default; #else bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageAlignmentControl == rhs.imageAlignmentControl ); # endif } bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; void * pNext = {}; Bool32 imageAlignmentControl = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PhysicalDeviceImageAlignmentControlFeaturesMESA; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA; }; // wrapper struct for struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageAlignmentControlPropertiesMESA.html struct PhysicalDeviceImageAlignmentControlPropertiesMESA { using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( uint32_t supportedImageAlignmentMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , supportedImageAlignmentMask{ supportedImageAlignmentMask_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, supportedImageAlignmentMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default; #else bool operator==( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask ); # endif } bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; void * pNext = {}; uint32_t supportedImageAlignmentMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA; }; // wrapper struct for struct VkPhysicalDeviceImageCompressionControlFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlFeaturesEXT.html struct PhysicalDeviceImageCompressionControlFeaturesEXT { using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; 
#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( Bool32 imageCompressionControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageCompressionControl{ imageCompressionControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageCompressionControlFeaturesEXT( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageCompressionControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageCompressionControlFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT & setImageCompressionControl( Bool32 imageCompressionControl_ ) & VULKAN_HPP_NOEXCEPT { imageCompressionControl = imageCompressionControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlFeaturesEXT && setImageCompressionControl( Bool32 imageCompressionControl_ ) && VULKAN_HPP_NOEXCEPT { imageCompressionControl = imageCompressionControl_; return std::move( *this ); } #endif 
/*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageCompressionControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageCompressionControlFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControl == rhs.imageCompressionControl ); # endif } bool operator!=( PhysicalDeviceImageCompressionControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT; void * pNext = {}; Bool32 imageCompressionControl = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageCompressionControlFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT.html struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { using NativeType = 
VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( Bool32 imageCompressionControlSwapchain_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & operator=( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT & 
setImageCompressionControlSwapchain( Bool32 imageCompressionControlSwapchain_ ) & VULKAN_HPP_NOEXCEPT { imageCompressionControlSwapchain = imageCompressionControlSwapchain_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT && setImageCompressionControlSwapchain( Bool32 imageCompressionControlSwapchain_ ) && VULKAN_HPP_NOEXCEPT { imageCompressionControlSwapchain = imageCompressionControlSwapchain_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageCompressionControlSwapchain ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageCompressionControlSwapchain == rhs.imageCompressionControlSwapchain ); # endif } bool operator!=( PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; void * 
pNext = {}; Bool32 imageCompressionControlSwapchain = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html struct PhysicalDeviceImageDrmFormatModifierInfoEXT { using NativeType = VkPhysicalDeviceImageDrmFormatModifierInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {}, SharingMode sharingMode_ = SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , drmFormatModifier{ drmFormatModifier_ } , sharingMode{ sharingMode_ } , queueFamilyIndexCount{ queueFamilyIndexCount_ } , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, SharingMode sharingMode_, ArrayProxyNoTemporaries const & queueFamilyIndices_, const void * pNext_ = nullptr ) : pNext( pNext_ ) , drmFormatModifier( 
drmFormatModifier_ ) , sharingMode( sharingMode_ ) , queueFamilyIndexCount( static_cast( queueFamilyIndices_.size() ) ) , pQueueFamilyIndices( queueFamilyIndices_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) & VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT && setDrmFormatModifier( uint64_t drmFormatModifier_ ) && VULKAN_HPP_NOEXCEPT { drmFormatModifier = drmFormatModifier_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( SharingMode sharingMode_ ) & VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT && setSharingMode( SharingMode sharingMode_ ) && VULKAN_HPP_NOEXCEPT { sharingMode = sharingMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = 
queueFamilyIndexCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT && setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = queueFamilyIndexCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) & VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT && setPQueueFamilyIndices( const uint32_t * pQueueFamilyIndices_ ) && VULKAN_HPP_NOEXCEPT { pQueueFamilyIndices = pQueueFamilyIndices_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( ArrayProxyNoTemporaries const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT { queueFamilyIndexCount = static_cast( queueFamilyIndices_.size() ); pQueueFamilyIndices = queueFamilyIndices_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; #else bool operator==( 
PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( drmFormatModifier == rhs.drmFormatModifier ) && ( sharingMode == rhs.sharingMode ) && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); # endif } bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT; const void * pNext = {}; uint64_t drmFormatModifier = {}; SharingMode sharingMode = SharingMode::eExclusive; uint32_t queueFamilyIndexCount = {}; const uint32_t * pQueueFamilyIndices = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT; }; // wrapper struct for struct VkPhysicalDeviceImageFormatInfo2, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageFormatInfo2.html struct PhysicalDeviceImageFormatInfo2 { using NativeType = VkPhysicalDeviceImageFormatInfo2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( Format format_ = Format::eUndefined, ImageType type_ = ImageType::e1D, ImageTiling tiling_ = ImageTiling::eOptimal, ImageUsageFlags usage_ = {}, ImageCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , format{ format_ } , type{ type_ } , tiling{ tiling_ } , usage{ usage_ } , flags{ flags_ } { } 
VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFormat( Format format_ ) & VULKAN_HPP_NOEXCEPT { format = format_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setFormat( Format format_ ) && VULKAN_HPP_NOEXCEPT { format = format_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setType( ImageType type_ ) & VULKAN_HPP_NOEXCEPT { type = type_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setType( ImageType type_ ) && VULKAN_HPP_NOEXCEPT { type = type_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setTiling( ImageTiling tiling_ ) & VULKAN_HPP_NOEXCEPT { tiling = tiling_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setTiling( ImageTiling tiling_ ) && VULKAN_HPP_NOEXCEPT { tiling = tiling_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setUsage( ImageUsageFlags usage_ ) & 
VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setUsage( ImageUsageFlags usage_ ) && VULKAN_HPP_NOEXCEPT { usage = usage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setFlags( ImageCreateFlags flags_ ) & VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 && setFlags( ImageCreateFlags flags_ ) && VULKAN_HPP_NOEXCEPT { flags = flags_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageFormatInfo2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageFormatInfo2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, format, type, tiling, usage, flags ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageFormatInfo2 const & ) const = default; #else bool operator==( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( format == rhs.format ) && ( type == rhs.type ) && ( tiling == rhs.tiling ) && ( usage == rhs.usage ) && ( flags == rhs.flags ); # endif } bool operator!=( PhysicalDeviceImageFormatInfo2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2; const void * pNext = {}; Format format = Format::eUndefined; ImageType type = ImageType::e1D; ImageTiling tiling = 
ImageTiling::eOptimal; ImageUsageFlags usage = {}; ImageCreateFlags flags = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageFormatInfo2; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageFormatInfo2; }; using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; // wrapper struct for struct VkPhysicalDeviceImageProcessing2FeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2FeaturesQCOM.html struct PhysicalDeviceImageProcessing2FeaturesQCOM { using NativeType = VkPhysicalDeviceImageProcessing2FeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( Bool32 textureBlockMatch2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , textureBlockMatch2{ textureBlockMatch2_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageProcessing2FeaturesQCOM( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageProcessing2FeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageProcessing2FeaturesQCOM & operator=( VkPhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageProcessing2FeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM & setTextureBlockMatch2( Bool32 textureBlockMatch2_ ) & VULKAN_HPP_NOEXCEPT { textureBlockMatch2 = textureBlockMatch2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessing2FeaturesQCOM && setTextureBlockMatch2( Bool32 textureBlockMatch2_ ) && VULKAN_HPP_NOEXCEPT { textureBlockMatch2 = textureBlockMatch2_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessing2FeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessing2FeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessing2FeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, textureBlockMatch2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageProcessing2FeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureBlockMatch2 == rhs.textureBlockMatch2 ); # endif } bool operator!=( PhysicalDeviceImageProcessing2FeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType 
= StructureType::ePhysicalDeviceImageProcessing2FeaturesQCOM; void * pNext = {}; Bool32 textureBlockMatch2 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageProcessing2FeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageProcessing2FeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceImageProcessing2PropertiesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessing2PropertiesQCOM.html struct PhysicalDeviceImageProcessing2PropertiesQCOM { using NativeType = VkPhysicalDeviceImageProcessing2PropertiesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( Extent2D maxBlockMatchWindow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxBlockMatchWindow{ maxBlockMatchWindow_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageProcessing2PropertiesQCOM( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageProcessing2PropertiesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageProcessing2PropertiesQCOM & operator=( VkPhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); 
} operator VkPhysicalDeviceImageProcessing2PropertiesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessing2PropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessing2PropertiesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxBlockMatchWindow ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageProcessing2PropertiesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBlockMatchWindow == rhs.maxBlockMatchWindow ); # endif } bool operator!=( PhysicalDeviceImageProcessing2PropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageProcessing2PropertiesQCOM; void * pNext = {}; Extent2D maxBlockMatchWindow = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageProcessing2PropertiesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageProcessing2PropertiesQCOM; }; // wrapper struct for struct VkPhysicalDeviceImageProcessingFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingFeaturesQCOM.html struct PhysicalDeviceImageProcessingFeaturesQCOM { using NativeType = VkPhysicalDeviceImageProcessingFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) 
&& !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( Bool32 textureSampleWeighted_ = {}, Bool32 textureBoxFilter_ = {}, Bool32 textureBlockMatch_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , textureSampleWeighted{ textureSampleWeighted_ } , textureBoxFilter{ textureBoxFilter_ } , textureBlockMatch{ textureBlockMatch_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingFeaturesQCOM( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageProcessingFeaturesQCOM( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageProcessingFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageProcessingFeaturesQCOM & operator=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageProcessingFeaturesQCOM & operator=( VkPhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureSampleWeighted( Bool32 textureSampleWeighted_ ) & VULKAN_HPP_NOEXCEPT { textureSampleWeighted = textureSampleWeighted_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM && setTextureSampleWeighted( Bool32 textureSampleWeighted_ ) && VULKAN_HPP_NOEXCEPT { textureSampleWeighted = textureSampleWeighted_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBoxFilter( Bool32 textureBoxFilter_ ) & VULKAN_HPP_NOEXCEPT { textureBoxFilter = textureBoxFilter_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM && setTextureBoxFilter( Bool32 textureBoxFilter_ ) && VULKAN_HPP_NOEXCEPT { textureBoxFilter = textureBoxFilter_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM & setTextureBlockMatch( Bool32 textureBlockMatch_ ) & VULKAN_HPP_NOEXCEPT { textureBlockMatch = textureBlockMatch_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageProcessingFeaturesQCOM && setTextureBlockMatch( Bool32 textureBlockMatch_ ) && VULKAN_HPP_NOEXCEPT { textureBlockMatch = textureBlockMatch_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, textureSampleWeighted, textureBoxFilter, textureBlockMatch ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageProcessingFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( textureSampleWeighted == rhs.textureSampleWeighted ) && ( textureBoxFilter == rhs.textureBoxFilter ) && ( 
textureBlockMatch == rhs.textureBlockMatch ); # endif } bool operator!=( PhysicalDeviceImageProcessingFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageProcessingFeaturesQCOM; void * pNext = {}; Bool32 textureSampleWeighted = {}; Bool32 textureBoxFilter = {}; Bool32 textureBlockMatch = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageProcessingFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageProcessingFeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceImageProcessingPropertiesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageProcessingPropertiesQCOM.html struct PhysicalDeviceImageProcessingPropertiesQCOM { using NativeType = VkPhysicalDeviceImageProcessingPropertiesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( uint32_t maxWeightFilterPhases_ = {}, Extent2D maxWeightFilterDimension_ = {}, Extent2D maxBlockMatchRegion_ = {}, Extent2D maxBoxFilterBlockSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxWeightFilterPhases{ maxWeightFilterPhases_ } , maxWeightFilterDimension{ maxWeightFilterDimension_ } , maxBlockMatchRegion{ maxBlockMatchRegion_ } , maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessingPropertiesQCOM( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageProcessingPropertiesQCOM( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceImageProcessingPropertiesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageProcessingPropertiesQCOM & operator=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageProcessingPropertiesQCOM & operator=( VkPhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceImageProcessingPropertiesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingPropertiesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingPropertiesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageProcessingPropertiesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxWeightFilterPhases, maxWeightFilterDimension, maxBlockMatchRegion, maxBoxFilterBlockSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageProcessingPropertiesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxWeightFilterPhases == rhs.maxWeightFilterPhases ) && ( maxWeightFilterDimension == rhs.maxWeightFilterDimension ) && ( maxBlockMatchRegion == rhs.maxBlockMatchRegion ) && ( maxBoxFilterBlockSize == rhs.maxBoxFilterBlockSize ); # endif } bool operator!=( PhysicalDeviceImageProcessingPropertiesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceImageProcessingPropertiesQCOM; void * pNext = {}; uint32_t maxWeightFilterPhases = {}; Extent2D maxWeightFilterDimension = {}; Extent2D maxBlockMatchRegion = {}; Extent2D maxBoxFilterBlockSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageProcessingPropertiesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageProcessingPropertiesQCOM; }; // wrapper struct for struct VkPhysicalDeviceImageRobustnessFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageRobustnessFeatures.html struct PhysicalDeviceImageRobustnessFeatures { using NativeType = VkPhysicalDeviceImageRobustnessFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( Bool32 robustImageAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , robustImageAccess{ robustImageAccess_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageRobustnessFeatures( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageRobustnessFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageRobustnessFeatures & operator=( PhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageRobustnessFeatures & operator=( VkPhysicalDeviceImageRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageRobustnessFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures & setRobustImageAccess( Bool32 robustImageAccess_ ) & VULKAN_HPP_NOEXCEPT { robustImageAccess = robustImageAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeatures && setRobustImageAccess( Bool32 robustImageAccess_ ) && VULKAN_HPP_NOEXCEPT { robustImageAccess = robustImageAccess_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageRobustnessFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageRobustnessFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, robustImageAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageRobustnessFeatures const & ) const = default; #else bool operator==( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustImageAccess == rhs.robustImageAccess ); # endif } bool operator!=( PhysicalDeviceImageRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeatures; void * 
pNext = {}; Bool32 robustImageAccess = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageRobustnessFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageRobustnessFeatures; }; using PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures; // wrapper struct for struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT.html struct PhysicalDeviceImageSlicedViewOf3DFeaturesEXT { using NativeType = VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( Bool32 imageSlicedViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageSlicedViewOf3D{ imageSlicedViewOf3D_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & operator=( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT & setImageSlicedViewOf3D( Bool32 imageSlicedViewOf3D_ ) & VULKAN_HPP_NOEXCEPT { imageSlicedViewOf3D = imageSlicedViewOf3D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageSlicedViewOf3DFeaturesEXT && setImageSlicedViewOf3D( Bool32 imageSlicedViewOf3D_ ) && VULKAN_HPP_NOEXCEPT { imageSlicedViewOf3D = imageSlicedViewOf3D_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageSlicedViewOf3D ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageSlicedViewOf3D == rhs.imageSlicedViewOf3D ); # endif } bool operator!=( PhysicalDeviceImageSlicedViewOf3DFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } 
#endif public: StructureType sType = StructureType::ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT; void * pNext = {}; Bool32 imageSlicedViewOf3D = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageSlicedViewOf3DFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceImageViewImageFormatInfoEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewImageFormatInfoEXT.html struct PhysicalDeviceImageViewImageFormatInfoEXT { using NativeType = VkPhysicalDeviceImageViewImageFormatInfoEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( ImageViewType imageViewType_ = ImageViewType::e1D, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageViewType{ imageViewType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( ImageViewType imageViewType_ ) & VULKAN_HPP_NOEXCEPT { imageViewType = imageViewType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT && setImageViewType( ImageViewType imageViewType_ ) && VULKAN_HPP_NOEXCEPT { imageViewType = imageViewType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageViewImageFormatInfoEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewImageFormatInfoEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewImageFormatInfoEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imageViewType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const & ) const = default; #else bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageViewType == rhs.imageViewType ); # endif } bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT; void * pNext = {}; ImageViewType imageViewType = ImageViewType::e1D; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageViewImageFormatInfoEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceImageViewImageFormatInfoEXT; }; // wrapper struct for struct VkPhysicalDeviceImageViewMinLodFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImageViewMinLodFeaturesEXT.html struct PhysicalDeviceImageViewMinLodFeaturesEXT { using NativeType = VkPhysicalDeviceImageViewMinLodFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minLod{ minLod_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImageViewMinLodFeaturesEXT( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageViewMinLodFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImageViewMinLodFeaturesEXT & operator=( VkPhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; 
return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT & setMinLod( Bool32 minLod_ ) & VULKAN_HPP_NOEXCEPT { minLod = minLod_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewMinLodFeaturesEXT && setMinLod( Bool32 minLod_ ) && VULKAN_HPP_NOEXCEPT { minLod = minLod_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewMinLodFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewMinLodFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImageViewMinLodFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minLod ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageViewMinLodFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minLod == rhs.minLod ); # endif } bool operator!=( PhysicalDeviceImageViewMinLodFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImageViewMinLodFeaturesEXT; void * pNext = {}; Bool32 minLod = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImageViewMinLodFeaturesEXT; }; #endif template <> struct CppType { using 
Type = PhysicalDeviceImageViewMinLodFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceImagelessFramebufferFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceImagelessFramebufferFeatures.html struct PhysicalDeviceImagelessFramebufferFeatures { using NativeType = VkPhysicalDeviceImagelessFramebufferFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( Bool32 imagelessFramebuffer_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imagelessFramebuffer{ imagelessFramebuffer_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( Bool32 imagelessFramebuffer_ ) & VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures && setImagelessFramebuffer( Bool32 imagelessFramebuffer_ ) && VULKAN_HPP_NOEXCEPT { imagelessFramebuffer = imagelessFramebuffer_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceImagelessFramebufferFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceImagelessFramebufferFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceImagelessFramebufferFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, imagelessFramebuffer ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const & ) const = default; #else bool operator==( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imagelessFramebuffer == rhs.imagelessFramebuffer ); # endif } bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures; void * pNext = {}; Bool32 imagelessFramebuffer = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceImagelessFramebufferFeatures; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceImagelessFramebufferFeatures; }; using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; // wrapper struct for struct VkPhysicalDeviceIndexTypeUint8Features, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceIndexTypeUint8Features.html struct PhysicalDeviceIndexTypeUint8Features { using NativeType = VkPhysicalDeviceIndexTypeUint8Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8Features; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indexTypeUint8{ indexTypeUint8_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & 
setIndexTypeUint8( Bool32 indexTypeUint8_ ) & VULKAN_HPP_NOEXCEPT { indexTypeUint8 = indexTypeUint8_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features && setIndexTypeUint8( Bool32 indexTypeUint8_ ) && VULKAN_HPP_NOEXCEPT { indexTypeUint8 = indexTypeUint8_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceIndexTypeUint8Features const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceIndexTypeUint8Features *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, indexTypeUint8 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default; #else bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indexTypeUint8 == rhs.indexTypeUint8 ); # endif } bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8Features; void * pNext = {}; Bool32 indexTypeUint8 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceIndexTypeUint8Features; }; #endif template <> struct CppType { using Type = PhysicalDeviceIndexTypeUint8Features; }; using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; using PhysicalDeviceIndexTypeUint8FeaturesKHR = 
PhysicalDeviceIndexTypeUint8Features; // wrapper struct for struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInheritedViewportScissorFeaturesNV.html struct PhysicalDeviceInheritedViewportScissorFeaturesNV { using NativeType = VkPhysicalDeviceInheritedViewportScissorFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( Bool32 inheritedViewportScissor2D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , inheritedViewportScissor2D{ inheritedViewportScissor2D_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInheritedViewportScissorFeaturesNV( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInheritedViewportScissorFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceInheritedViewportScissorFeaturesNV & operator=( VkPhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV && setPNext( void * pNext_ ) && 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV & setInheritedViewportScissor2D( Bool32 inheritedViewportScissor2D_ ) & VULKAN_HPP_NOEXCEPT { inheritedViewportScissor2D = inheritedViewportScissor2D_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInheritedViewportScissorFeaturesNV && setInheritedViewportScissor2D( Bool32 inheritedViewportScissor2D_ ) && VULKAN_HPP_NOEXCEPT { inheritedViewportScissor2D = inheritedViewportScissor2D_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceInheritedViewportScissorFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, inheritedViewportScissor2D ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceInheritedViewportScissorFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inheritedViewportScissor2D == rhs.inheritedViewportScissor2D ); # endif } bool operator!=( PhysicalDeviceInheritedViewportScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceInheritedViewportScissorFeaturesNV; void * pNext = 
{}; Bool32 inheritedViewportScissor2D = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceInheritedViewportScissorFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockFeatures.html struct PhysicalDeviceInlineUniformBlockFeatures { using NativeType = VkPhysicalDeviceInlineUniformBlockFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( Bool32 inlineUniformBlock_ = {}, Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , inlineUniformBlock{ inlineUniformBlock_ } , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockFeatures( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInlineUniformBlockFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceInlineUniformBlockFeatures & operator=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceInlineUniformBlockFeatures & operator=( VkPhysicalDeviceInlineUniformBlockFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setInlineUniformBlock( Bool32 inlineUniformBlock_ ) & VULKAN_HPP_NOEXCEPT { inlineUniformBlock = inlineUniformBlock_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures && setInlineUniformBlock( Bool32 inlineUniformBlock_ ) && VULKAN_HPP_NOEXCEPT { inlineUniformBlock = inlineUniformBlock_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures & setDescriptorBindingInlineUniformBlockUpdateAfterBind( Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) & VULKAN_HPP_NOEXCEPT { descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeatures && setDescriptorBindingInlineUniformBlockUpdateAfterBind( Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) && VULKAN_HPP_NOEXCEPT { descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceInlineUniformBlockFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple 
reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, inlineUniformBlock, descriptorBindingInlineUniformBlockUpdateAfterBind ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceInlineUniformBlockFeatures const & ) const = default; #else bool operator==( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( inlineUniformBlock == rhs.inlineUniformBlock ) && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind ); # endif } bool operator!=( PhysicalDeviceInlineUniformBlockFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeatures; void * pNext = {}; Bool32 inlineUniformBlock = {}; Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceInlineUniformBlockFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceInlineUniformBlockFeatures; }; using PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures; // wrapper struct for struct VkPhysicalDeviceInlineUniformBlockProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInlineUniformBlockProperties.html struct PhysicalDeviceInlineUniformBlockProperties { using NativeType = VkPhysicalDeviceInlineUniformBlockProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( 
uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockProperties( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInlineUniformBlockProperties( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInlineUniformBlockProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceInlineUniformBlockProperties & operator=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceInlineUniformBlockProperties & operator=( VkPhysicalDeviceInlineUniformBlockProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceInlineUniformBlockProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceInlineUniformBlockProperties *() VULKAN_HPP_NOEXCEPT { return 
reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxInlineUniformBlockSize, maxPerStageDescriptorInlineUniformBlocks, maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks, maxDescriptorSetInlineUniformBlocks, maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceInlineUniformBlockProperties const & ) const = default; #else bool operator==( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize ) && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks ) && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks ) && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks ) && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks ); # endif } bool operator!=( PhysicalDeviceInlineUniformBlockProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockProperties; void * pNext = {}; uint32_t maxInlineUniformBlockSize = {}; uint32_t maxPerStageDescriptorInlineUniformBlocks = {}; uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {}; uint32_t maxDescriptorSetInlineUniformBlocks = {}; uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceInlineUniformBlockProperties; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceInlineUniformBlockProperties; }; using PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties; // wrapper struct for struct VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR.html struct PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR { using NativeType = VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( Bool32 internallySynchronizedQueues_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , internallySynchronizedQueues{ internallySynchronizedQueues_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & operator=( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & operator=( VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR & setInternallySynchronizedQueues( Bool32 internallySynchronizedQueues_ ) & VULKAN_HPP_NOEXCEPT { internallySynchronizedQueues = internallySynchronizedQueues_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR && setInternallySynchronizedQueues( Bool32 internallySynchronizedQueues_ ) && VULKAN_HPP_NOEXCEPT { internallySynchronizedQueues = internallySynchronizedQueues_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, internallySynchronizedQueues ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( 
internallySynchronizedQueues == rhs.internallySynchronizedQueues ); # endif } bool operator!=( PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; void * pNext = {}; Bool32 internallySynchronizedQueues = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceInternallySynchronizedQueuesFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html struct PhysicalDeviceInvocationMaskFeaturesHUAWEI { using NativeType = VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( Bool32 invocationMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , invocationMask{ invocationMask_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceInvocationMaskFeaturesHUAWEI( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceInvocationMaskFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceInvocationMaskFeaturesHUAWEI & operator=( 
VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI & setInvocationMask( Bool32 invocationMask_ ) & VULKAN_HPP_NOEXCEPT { invocationMask = invocationMask_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInvocationMaskFeaturesHUAWEI && setInvocationMask( Bool32 invocationMask_ ) && VULKAN_HPP_NOEXCEPT { invocationMask = invocationMask_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceInvocationMaskFeaturesHUAWEI *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, invocationMask ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & ) const = default; #else bool operator==( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( invocationMask == 
rhs.invocationMask ); # endif } bool operator!=( PhysicalDeviceInvocationMaskFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceInvocationMaskFeaturesHUAWEI; void * pNext = {}; Bool32 invocationMask = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; }; #endif template <> struct CppType { using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; }; // wrapper struct for struct VkPhysicalDeviceLayeredApiPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesKHR.html struct PhysicalDeviceLayeredApiPropertiesKHR { using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, PhysicalDeviceLayeredApiKHR layeredAPI_ = PhysicalDeviceLayeredApiKHR::eVulkan, std::array const & deviceName_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vendorID{ vendorID_ } , deviceID{ deviceID_ } , layeredAPI{ layeredAPI_ } , deviceName{ deviceName_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( layeredAPI == rhs.layeredAPI ) && ( deviceName == rhs.deviceName ); # endif } bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; void * pNext = {}; uint32_t vendorID = {}; uint32_t deviceID = {}; PhysicalDeviceLayeredApiKHR layeredAPI = PhysicalDeviceLayeredApiKHR::eVulkan; ArrayWrapper1D deviceName = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLayeredApiPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceLayeredApiPropertiesKHR; }; // wrapper struct for struct 
VkPhysicalDeviceLayeredApiPropertiesListKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiPropertiesListKHR.html struct PhysicalDeviceLayeredApiPropertiesListKHR { using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( uint32_t layeredApiCount_ = {}, PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , layeredApiCount{ layeredApiCount_ } , pLayeredApis{ pLayeredApis_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceLayeredApiPropertiesListKHR( ArrayProxyNoTemporaries const & layeredApis_, void * pNext_ = nullptr ) : pNext( pNext_ ), layeredApiCount( static_cast( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() ) { } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) & VULKAN_HPP_NOEXCEPT { layeredApiCount = layeredApiCount_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR && setLayeredApiCount( uint32_t layeredApiCount_ ) && VULKAN_HPP_NOEXCEPT { layeredApiCount = layeredApiCount_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPLayeredApis( PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) & VULKAN_HPP_NOEXCEPT { pLayeredApis = pLayeredApis_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR && setPLayeredApis( PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) && VULKAN_HPP_NOEXCEPT { pLayeredApis = pLayeredApis_; return std::move( *this ); } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApis( ArrayProxyNoTemporaries const & layeredApis_ ) VULKAN_HPP_NOEXCEPT { layeredApiCount = static_cast( layeredApis_.size() ); pLayeredApis = layeredApis_.data(); return *this; } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesListKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiPropertiesListKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, layeredApiCount, pLayeredApis ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default; #else bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layeredApiCount == rhs.layeredApiCount ) && ( pLayeredApis == rhs.pLayeredApis ); # endif } bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; void * pNext = {}; uint32_t layeredApiCount = {}; PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLayeredApiPropertiesListKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceLayeredApiPropertiesListKHR; }; // wrapper struct for struct VkPhysicalDeviceLimits, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLimits.html struct PhysicalDeviceLimits { using NativeType = VkPhysicalDeviceLimits; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, DeviceSize bufferImageGranularity_ = {}, 
DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array const & maxComputeWorkGroupCount_ = {}, uint32_t 
maxComputeWorkGroupInvocations_ = {}, std::array const & maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array const & maxViewportDimensions_ = {}, std::array const & viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, DeviceSize minTexelBufferOffsetAlignment_ = {}, DeviceSize minUniformBufferOffsetAlignment_ = {}, DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, SampleCountFlags framebufferColorSampleCounts_ = {}, SampleCountFlags framebufferDepthSampleCounts_ = {}, SampleCountFlags framebufferStencilSampleCounts_ = {}, SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, SampleCountFlags sampledImageColorSampleCounts_ = {}, SampleCountFlags sampledImageIntegerSampleCounts_ = {}, SampleCountFlags sampledImageDepthSampleCounts_ = {}, SampleCountFlags sampledImageStencilSampleCounts_ = {}, SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array const & pointSizeRange_ = {}, std::array const & lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, Bool32 
strictLines_ = {}, Bool32 standardSampleLocations_ = {}, DeviceSize optimalBufferCopyOffsetAlignment_ = {}, DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT : maxImageDimension1D{ maxImageDimension1D_ } , maxImageDimension2D{ maxImageDimension2D_ } , maxImageDimension3D{ maxImageDimension3D_ } , maxImageDimensionCube{ maxImageDimensionCube_ } , maxImageArrayLayers{ maxImageArrayLayers_ } , maxTexelBufferElements{ maxTexelBufferElements_ } , maxUniformBufferRange{ maxUniformBufferRange_ } , maxStorageBufferRange{ maxStorageBufferRange_ } , maxPushConstantsSize{ maxPushConstantsSize_ } , maxMemoryAllocationCount{ maxMemoryAllocationCount_ } , maxSamplerAllocationCount{ maxSamplerAllocationCount_ } , bufferImageGranularity{ bufferImageGranularity_ } , sparseAddressSpaceSize{ sparseAddressSpaceSize_ } , maxBoundDescriptorSets{ maxBoundDescriptorSets_ } , maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ } , maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ } , maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ } , maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ } , maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ } , maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ } , maxPerStageResources{ maxPerStageResources_ } , maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ } , maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ } , maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ } , maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ } , maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ } , maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ } , maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ } , maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ } , 
maxVertexInputAttributes{ maxVertexInputAttributes_ } , maxVertexInputBindings{ maxVertexInputBindings_ } , maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ } , maxVertexInputBindingStride{ maxVertexInputBindingStride_ } , maxVertexOutputComponents{ maxVertexOutputComponents_ } , maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ } , maxTessellationPatchSize{ maxTessellationPatchSize_ } , maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ } , maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ } , maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ } , maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ } , maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ } , maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ } , maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ } , maxGeometryInputComponents{ maxGeometryInputComponents_ } , maxGeometryOutputComponents{ maxGeometryOutputComponents_ } , maxGeometryOutputVertices{ maxGeometryOutputVertices_ } , maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ } , maxFragmentInputComponents{ maxFragmentInputComponents_ } , maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ } , maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ } , maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ } , maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ } , maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ } , maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ } , maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ } , subPixelPrecisionBits{ subPixelPrecisionBits_ } , subTexelPrecisionBits{ subTexelPrecisionBits_ } , mipmapPrecisionBits{ mipmapPrecisionBits_ } , maxDrawIndexedIndexValue{ 
maxDrawIndexedIndexValue_ } , maxDrawIndirectCount{ maxDrawIndirectCount_ } , maxSamplerLodBias{ maxSamplerLodBias_ } , maxSamplerAnisotropy{ maxSamplerAnisotropy_ } , maxViewports{ maxViewports_ } , maxViewportDimensions{ maxViewportDimensions_ } , viewportBoundsRange{ viewportBoundsRange_ } , viewportSubPixelBits{ viewportSubPixelBits_ } , minMemoryMapAlignment{ minMemoryMapAlignment_ } , minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ } , minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ } , minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ } , minTexelOffset{ minTexelOffset_ } , maxTexelOffset{ maxTexelOffset_ } , minTexelGatherOffset{ minTexelGatherOffset_ } , maxTexelGatherOffset{ maxTexelGatherOffset_ } , minInterpolationOffset{ minInterpolationOffset_ } , maxInterpolationOffset{ maxInterpolationOffset_ } , subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ } , maxFramebufferWidth{ maxFramebufferWidth_ } , maxFramebufferHeight{ maxFramebufferHeight_ } , maxFramebufferLayers{ maxFramebufferLayers_ } , framebufferColorSampleCounts{ framebufferColorSampleCounts_ } , framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ } , framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ } , framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ } , maxColorAttachments{ maxColorAttachments_ } , sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ } , sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ } , sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ } , sampledImageStencilSampleCounts{ sampledImageStencilSampleCounts_ } , storageImageSampleCounts{ storageImageSampleCounts_ } , maxSampleMaskWords{ maxSampleMaskWords_ } , timestampComputeAndGraphics{ timestampComputeAndGraphics_ } , timestampPeriod{ timestampPeriod_ } , maxClipDistances{ maxClipDistances_ } , maxCullDistances{ maxCullDistances_ } , maxCombinedClipAndCullDistances{ 
maxCombinedClipAndCullDistances_ } , discreteQueuePriorities{ discreteQueuePriorities_ } , pointSizeRange{ pointSizeRange_ } , lineWidthRange{ lineWidthRange_ } , pointSizeGranularity{ pointSizeGranularity_ } , lineWidthGranularity{ lineWidthGranularity_ } , strictLines{ strictLines_ } , standardSampleLocations{ standardSampleLocations_ } , optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ } , optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ } , nonCoherentAtomSize{ nonCoherentAtomSize_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLimits( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLimits const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLimits const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLimits *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, ArrayWrapper1D const &, ArrayWrapper1D const &, uint32_t const &, size_t const &, DeviceSize const &, DeviceSize const &, DeviceSize const &, int32_t const &, uint32_t const &, int32_t const &, uint32_t const &, float const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, 
uint32_t const &, SampleCountFlags const &, SampleCountFlags const &, SampleCountFlags const &, SampleCountFlags const &, uint32_t const &, SampleCountFlags const &, SampleCountFlags const &, SampleCountFlags const &, SampleCountFlags const &, SampleCountFlags const &, uint32_t const &, Bool32 const &, float const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, ArrayWrapper1D const &, ArrayWrapper1D const &, float const &, float const &, Bool32 const &, Bool32 const &, DeviceSize const &, DeviceSize const &, DeviceSize const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( maxImageDimension1D, maxImageDimension2D, maxImageDimension3D, maxImageDimensionCube, maxImageArrayLayers, maxTexelBufferElements, maxUniformBufferRange, maxStorageBufferRange, maxPushConstantsSize, maxMemoryAllocationCount, maxSamplerAllocationCount, bufferImageGranularity, sparseAddressSpaceSize, maxBoundDescriptorSets, maxPerStageDescriptorSamplers, maxPerStageDescriptorUniformBuffers, maxPerStageDescriptorStorageBuffers, maxPerStageDescriptorSampledImages, maxPerStageDescriptorStorageImages, maxPerStageDescriptorInputAttachments, maxPerStageResources, maxDescriptorSetSamplers, maxDescriptorSetUniformBuffers, maxDescriptorSetUniformBuffersDynamic, maxDescriptorSetStorageBuffers, maxDescriptorSetStorageBuffersDynamic, maxDescriptorSetSampledImages, maxDescriptorSetStorageImages, maxDescriptorSetInputAttachments, maxVertexInputAttributes, maxVertexInputBindings, maxVertexInputAttributeOffset, maxVertexInputBindingStride, maxVertexOutputComponents, maxTessellationGenerationLevel, maxTessellationPatchSize, maxTessellationControlPerVertexInputComponents, maxTessellationControlPerVertexOutputComponents, maxTessellationControlPerPatchOutputComponents, maxTessellationControlTotalOutputComponents, maxTessellationEvaluationInputComponents, maxTessellationEvaluationOutputComponents, maxGeometryShaderInvocations, maxGeometryInputComponents, maxGeometryOutputComponents, 
maxGeometryOutputVertices, maxGeometryTotalOutputComponents, maxFragmentInputComponents, maxFragmentOutputAttachments, maxFragmentDualSrcAttachments, maxFragmentCombinedOutputResources, maxComputeSharedMemorySize, maxComputeWorkGroupCount, maxComputeWorkGroupInvocations, maxComputeWorkGroupSize, subPixelPrecisionBits, subTexelPrecisionBits, mipmapPrecisionBits, maxDrawIndexedIndexValue, maxDrawIndirectCount, maxSamplerLodBias, maxSamplerAnisotropy, maxViewports, maxViewportDimensions, viewportBoundsRange, viewportSubPixelBits, minMemoryMapAlignment, minTexelBufferOffsetAlignment, minUniformBufferOffsetAlignment, minStorageBufferOffsetAlignment, minTexelOffset, maxTexelOffset, minTexelGatherOffset, maxTexelGatherOffset, minInterpolationOffset, maxInterpolationOffset, subPixelInterpolationOffsetBits, maxFramebufferWidth, maxFramebufferHeight, maxFramebufferLayers, framebufferColorSampleCounts, framebufferDepthSampleCounts, framebufferStencilSampleCounts, framebufferNoAttachmentsSampleCounts, maxColorAttachments, sampledImageColorSampleCounts, sampledImageIntegerSampleCounts, sampledImageDepthSampleCounts, sampledImageStencilSampleCounts, storageImageSampleCounts, maxSampleMaskWords, timestampComputeAndGraphics, timestampPeriod, maxClipDistances, maxCullDistances, maxCombinedClipAndCullDistances, discreteQueuePriorities, pointSizeRange, lineWidthRange, pointSizeGranularity, lineWidthGranularity, strictLines, standardSampleLocations, optimalBufferCopyOffsetAlignment, optimalBufferCopyRowPitchAlignment, nonCoherentAtomSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLimits const & ) const = default; #else bool operator==( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( maxImageDimension1D == rhs.maxImageDimension1D ) && ( maxImageDimension2D == rhs.maxImageDimension2D ) && ( maxImageDimension3D == 
rhs.maxImageDimension3D ) && ( maxImageDimensionCube == rhs.maxImageDimensionCube ) && ( maxImageArrayLayers == rhs.maxImageArrayLayers ) && ( maxTexelBufferElements == rhs.maxTexelBufferElements ) && ( maxUniformBufferRange == rhs.maxUniformBufferRange ) && ( maxStorageBufferRange == rhs.maxStorageBufferRange ) && ( maxPushConstantsSize == rhs.maxPushConstantsSize ) && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount ) && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount ) && ( bufferImageGranularity == rhs.bufferImageGranularity ) && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize ) && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets ) && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers ) && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers ) && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers ) && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages ) && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages ) && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments ) && ( maxPerStageResources == rhs.maxPerStageResources ) && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers ) && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers ) && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic ) && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers ) && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic ) && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages ) && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages ) && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments ) && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes ) && ( maxVertexInputBindings == rhs.maxVertexInputBindings ) && ( 
maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset ) && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride ) && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents ) && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel ) && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize ) && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents ) && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents ) && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents ) && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents ) && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents ) && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents ) && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations ) && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents ) && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents ) && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices ) && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents ) && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents ) && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments ) && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments ) && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources ) && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize ) && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount ) && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations ) && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize ) && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits ) && ( subTexelPrecisionBits == 
rhs.subTexelPrecisionBits ) && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits ) && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue ) && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount ) && ( maxSamplerLodBias == rhs.maxSamplerLodBias ) && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy ) && ( maxViewports == rhs.maxViewports ) && ( maxViewportDimensions == rhs.maxViewportDimensions ) && ( viewportBoundsRange == rhs.viewportBoundsRange ) && ( viewportSubPixelBits == rhs.viewportSubPixelBits ) && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment ) && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment ) && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment ) && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment ) && ( minTexelOffset == rhs.minTexelOffset ) && ( maxTexelOffset == rhs.maxTexelOffset ) && ( minTexelGatherOffset == rhs.minTexelGatherOffset ) && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset ) && ( minInterpolationOffset == rhs.minInterpolationOffset ) && ( maxInterpolationOffset == rhs.maxInterpolationOffset ) && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits ) && ( maxFramebufferWidth == rhs.maxFramebufferWidth ) && ( maxFramebufferHeight == rhs.maxFramebufferHeight ) && ( maxFramebufferLayers == rhs.maxFramebufferLayers ) && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts ) && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts ) && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts ) && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts ) && ( maxColorAttachments == rhs.maxColorAttachments ) && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts ) && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts ) && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts ) && ( sampledImageStencilSampleCounts == 
rhs.sampledImageStencilSampleCounts ) && ( storageImageSampleCounts == rhs.storageImageSampleCounts ) && ( maxSampleMaskWords == rhs.maxSampleMaskWords ) && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics ) && ( timestampPeriod == rhs.timestampPeriod ) && ( maxClipDistances == rhs.maxClipDistances ) && ( maxCullDistances == rhs.maxCullDistances ) && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances ) && ( discreteQueuePriorities == rhs.discreteQueuePriorities ) && ( pointSizeRange == rhs.pointSizeRange ) && ( lineWidthRange == rhs.lineWidthRange ) && ( pointSizeGranularity == rhs.pointSizeGranularity ) && ( lineWidthGranularity == rhs.lineWidthGranularity ) && ( strictLines == rhs.strictLines ) && ( standardSampleLocations == rhs.standardSampleLocations ) && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment ) && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment ) && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize ); # endif } bool operator!=( PhysicalDeviceLimits const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: uint32_t maxImageDimension1D = {}; uint32_t maxImageDimension2D = {}; uint32_t maxImageDimension3D = {}; uint32_t maxImageDimensionCube = {}; uint32_t maxImageArrayLayers = {}; uint32_t maxTexelBufferElements = {}; uint32_t maxUniformBufferRange = {}; uint32_t maxStorageBufferRange = {}; uint32_t maxPushConstantsSize = {}; uint32_t maxMemoryAllocationCount = {}; uint32_t maxSamplerAllocationCount = {}; DeviceSize bufferImageGranularity = {}; DeviceSize sparseAddressSpaceSize = {}; uint32_t maxBoundDescriptorSets = {}; uint32_t maxPerStageDescriptorSamplers = {}; uint32_t maxPerStageDescriptorUniformBuffers = {}; uint32_t maxPerStageDescriptorStorageBuffers = {}; uint32_t maxPerStageDescriptorSampledImages = {}; uint32_t maxPerStageDescriptorStorageImages = {}; uint32_t maxPerStageDescriptorInputAttachments = {}; uint32_t 
maxPerStageResources = {}; uint32_t maxDescriptorSetSamplers = {}; uint32_t maxDescriptorSetUniformBuffers = {}; uint32_t maxDescriptorSetUniformBuffersDynamic = {}; uint32_t maxDescriptorSetStorageBuffers = {}; uint32_t maxDescriptorSetStorageBuffersDynamic = {}; uint32_t maxDescriptorSetSampledImages = {}; uint32_t maxDescriptorSetStorageImages = {}; uint32_t maxDescriptorSetInputAttachments = {}; uint32_t maxVertexInputAttributes = {}; uint32_t maxVertexInputBindings = {}; uint32_t maxVertexInputAttributeOffset = {}; uint32_t maxVertexInputBindingStride = {}; uint32_t maxVertexOutputComponents = {}; uint32_t maxTessellationGenerationLevel = {}; uint32_t maxTessellationPatchSize = {}; uint32_t maxTessellationControlPerVertexInputComponents = {}; uint32_t maxTessellationControlPerVertexOutputComponents = {}; uint32_t maxTessellationControlPerPatchOutputComponents = {}; uint32_t maxTessellationControlTotalOutputComponents = {}; uint32_t maxTessellationEvaluationInputComponents = {}; uint32_t maxTessellationEvaluationOutputComponents = {}; uint32_t maxGeometryShaderInvocations = {}; uint32_t maxGeometryInputComponents = {}; uint32_t maxGeometryOutputComponents = {}; uint32_t maxGeometryOutputVertices = {}; uint32_t maxGeometryTotalOutputComponents = {}; uint32_t maxFragmentInputComponents = {}; uint32_t maxFragmentOutputAttachments = {}; uint32_t maxFragmentDualSrcAttachments = {}; uint32_t maxFragmentCombinedOutputResources = {}; uint32_t maxComputeSharedMemorySize = {}; ArrayWrapper1D maxComputeWorkGroupCount = {}; uint32_t maxComputeWorkGroupInvocations = {}; ArrayWrapper1D maxComputeWorkGroupSize = {}; uint32_t subPixelPrecisionBits = {}; uint32_t subTexelPrecisionBits = {}; uint32_t mipmapPrecisionBits = {}; uint32_t maxDrawIndexedIndexValue = {}; uint32_t maxDrawIndirectCount = {}; float maxSamplerLodBias = {}; float maxSamplerAnisotropy = {}; uint32_t maxViewports = {}; ArrayWrapper1D maxViewportDimensions = {}; ArrayWrapper1D viewportBoundsRange = {}; 
uint32_t viewportSubPixelBits = {}; size_t minMemoryMapAlignment = {}; DeviceSize minTexelBufferOffsetAlignment = {}; DeviceSize minUniformBufferOffsetAlignment = {}; DeviceSize minStorageBufferOffsetAlignment = {}; int32_t minTexelOffset = {}; uint32_t maxTexelOffset = {}; int32_t minTexelGatherOffset = {}; uint32_t maxTexelGatherOffset = {}; float minInterpolationOffset = {}; float maxInterpolationOffset = {}; uint32_t subPixelInterpolationOffsetBits = {}; uint32_t maxFramebufferWidth = {}; uint32_t maxFramebufferHeight = {}; uint32_t maxFramebufferLayers = {}; SampleCountFlags framebufferColorSampleCounts = {}; SampleCountFlags framebufferDepthSampleCounts = {}; SampleCountFlags framebufferStencilSampleCounts = {}; SampleCountFlags framebufferNoAttachmentsSampleCounts = {}; uint32_t maxColorAttachments = {}; SampleCountFlags sampledImageColorSampleCounts = {}; SampleCountFlags sampledImageIntegerSampleCounts = {}; SampleCountFlags sampledImageDepthSampleCounts = {}; SampleCountFlags sampledImageStencilSampleCounts = {}; SampleCountFlags storageImageSampleCounts = {}; uint32_t maxSampleMaskWords = {}; Bool32 timestampComputeAndGraphics = {}; float timestampPeriod = {}; uint32_t maxClipDistances = {}; uint32_t maxCullDistances = {}; uint32_t maxCombinedClipAndCullDistances = {}; uint32_t discreteQueuePriorities = {}; ArrayWrapper1D pointSizeRange = {}; ArrayWrapper1D lineWidthRange = {}; float pointSizeGranularity = {}; float lineWidthGranularity = {}; Bool32 strictLines = {}; Bool32 standardSampleLocations = {}; DeviceSize optimalBufferCopyOffsetAlignment = {}; DeviceSize optimalBufferCopyRowPitchAlignment = {}; DeviceSize nonCoherentAtomSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLimits; }; #endif // wrapper struct for struct VkPhysicalDeviceSparseProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceSparseProperties.html struct PhysicalDeviceSparseProperties { 
using NativeType = VkPhysicalDeviceSparseProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( Bool32 residencyStandard2DBlockShape_ = {}, Bool32 residencyStandard2DMultisampleBlockShape_ = {}, Bool32 residencyStandard3DBlockShape_ = {}, Bool32 residencyAlignedMipSize_ = {}, Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT : residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ } , residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ } , residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ } , residencyAlignedMipSize{ residencyAlignedMipSize_ } , residencyNonResidentStrict{ residencyNonResidentStrict_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceSparseProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceSparseProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( residencyStandard2DBlockShape, 
residencyStandard2DMultisampleBlockShape, residencyStandard3DBlockShape, residencyAlignedMipSize, residencyNonResidentStrict ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; #else bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); # endif } bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: Bool32 residencyStandard2DBlockShape = {}; Bool32 residencyStandard2DMultisampleBlockShape = {}; Bool32 residencyStandard3DBlockShape = {}; Bool32 residencyAlignedMipSize = {}; Bool32 residencyNonResidentStrict = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceSparseProperties; }; #endif // wrapper struct for struct VkPhysicalDeviceProperties, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties.html struct PhysicalDeviceProperties { using NativeType = VkPhysicalDeviceProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, PhysicalDeviceType deviceType_ = PhysicalDeviceType::eOther, std::array const & deviceName_ = {}, std::array const & pipelineCacheUUID_ = {}, PhysicalDeviceLimits limits_ = {}, PhysicalDeviceSparseProperties 
sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT : apiVersion{ apiVersion_ } , driverVersion{ driverVersion_ } , vendorID{ vendorID_ } , deviceID{ deviceID_ } , deviceType{ deviceType_ } , deviceName{ deviceName_ } , pipelineCacheUUID{ pipelineCacheUUID_ } , limits{ limits_ } , sparseProperties{ sparseProperties_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &, PhysicalDeviceLimits const &, PhysicalDeviceSparseProperties const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) return cmp; if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) return cmp; if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) return cmp; if ( 
auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) return cmp; if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) return cmp; if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) return cmp; if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) return cmp; if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) return cmp; return std::partial_ordering::equivalent; } #endif bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); } bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: uint32_t apiVersion = {}; uint32_t driverVersion = {}; uint32_t vendorID = {}; uint32_t deviceID = {}; PhysicalDeviceType deviceType = PhysicalDeviceType::eOther; ArrayWrapper1D deviceName = {}; ArrayWrapper1D pipelineCacheUUID = {}; PhysicalDeviceLimits limits = {}; PhysicalDeviceSparseProperties sparseProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProperties; }; #endif // wrapper struct for struct VkPhysicalDeviceProperties2, see https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProperties2.html struct PhysicalDeviceProperties2 { using NativeType = VkPhysicalDeviceProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties properties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , properties{ properties_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProperties2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProperties2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, properties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; #else bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); # endif } bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceProperties2; void * pNext = {}; 
PhysicalDeviceProperties properties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProperties2; }; #endif template <> struct CppType { using Type = PhysicalDeviceProperties2; }; using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; // wrapper struct for struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredApiVulkanPropertiesKHR.html struct PhysicalDeviceLayeredApiVulkanPropertiesKHR { using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceProperties2 properties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , properties{ properties_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, properties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); # endif } bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; void * pNext = {}; PhysicalDeviceProperties2 properties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceLayeredDriverPropertiesMSFT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLayeredDriverPropertiesMSFT.html struct PhysicalDeviceLayeredDriverPropertiesMSFT { using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PhysicalDeviceLayeredDriverPropertiesMSFT( LayeredDriverUnderlyingApiMSFT underlyingAPI_ = LayeredDriverUnderlyingApiMSFT::eNone, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , underlyingAPI{ underlyingAPI_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLayeredDriverPropertiesMSFT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, underlyingAPI ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; #else bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI 
); # endif } bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; void * pNext = {}; LayeredDriverUnderlyingApiMSFT underlyingAPI = LayeredDriverUnderlyingApiMSFT::eNone; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; }; #endif template <> struct CppType { using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; }; // wrapper struct for struct VkPhysicalDeviceLegacyDitheringFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyDitheringFeaturesEXT.html struct PhysicalDeviceLegacyDitheringFeaturesEXT { using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( Bool32 legacyDithering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , legacyDithering{ legacyDithering_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( Bool32 legacyDithering_ ) & VULKAN_HPP_NOEXCEPT { legacyDithering = legacyDithering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT && setLegacyDithering( Bool32 legacyDithering_ ) && VULKAN_HPP_NOEXCEPT { legacyDithering = legacyDithering_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyDitheringFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, legacyDithering ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); # endif } bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs 
) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; void * pNext = {}; Bool32 legacyDithering = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT.html struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT { using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( Bool32 legacyVertexAttributes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , legacyVertexAttributes{ legacyVertexAttributes_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return 
*this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setLegacyVertexAttributes( Bool32 legacyVertexAttributes_ ) & VULKAN_HPP_NOEXCEPT { legacyVertexAttributes = legacyVertexAttributes_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT && setLegacyVertexAttributes( Bool32 legacyVertexAttributes_ ) && VULKAN_HPP_NOEXCEPT { legacyVertexAttributes = legacyVertexAttributes_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, legacyVertexAttributes ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyVertexAttributes 
== rhs.legacyVertexAttributes ); # endif } bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; void * pNext = {}; Bool32 legacyVertexAttributes = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT.html struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT { using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( Bool32 nativeUnalignedPerformance_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , nativeUnalignedPerformance{ nativeUnalignedPerformance_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, nativeUnalignedPerformance ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance ); # endif } bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; void * pNext = {}; Bool32 nativeUnalignedPerformance = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceLineRasterizationFeatures, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationFeatures.html struct PhysicalDeviceLineRasterizationFeatures { using NativeType = VkPhysicalDeviceLineRasterizationFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( Bool32 rectangularLines_ = {}, Bool32 bresenhamLines_ = {}, Bool32 smoothLines_ = {}, Bool32 stippledRectangularLines_ = {}, Bool32 stippledBresenhamLines_ = {}, Bool32 stippledSmoothLines_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rectangularLines{ rectangularLines_ } , bresenhamLines{ bresenhamLines_ } , smoothLines{ smoothLines_ } , stippledRectangularLines{ stippledRectangularLines_ } , stippledBresenhamLines{ stippledBresenhamLines_ } , stippledSmoothLines{ stippledSmoothLines_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( Bool32 rectangularLines_ ) & VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setRectangularLines( Bool32 rectangularLines_ ) && VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( Bool32 bresenhamLines_ ) & VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setBresenhamLines( Bool32 bresenhamLines_ ) && VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( Bool32 smoothLines_ ) & VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setSmoothLines( Bool32 smoothLines_ ) && VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledRectangularLines( Bool32 stippledRectangularLines_ ) & VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setStippledRectangularLines( Bool32 stippledRectangularLines_ ) && VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledBresenhamLines( Bool32 stippledBresenhamLines_ ) & VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setStippledBresenhamLines( Bool32 stippledBresenhamLines_ ) && VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledSmoothLines( Bool32 stippledSmoothLines_ ) & VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures && setStippledSmoothLines( Bool32 stippledSmoothLines_ ) && VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rectangularLines, bresenhamLines, smoothLines, stippledRectangularLines, stippledBresenhamLines, stippledSmoothLines ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default; #else bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && ( stippledRectangularLines == rhs.stippledRectangularLines ) && ( 
stippledBresenhamLines == rhs.stippledBresenhamLines ) && ( stippledSmoothLines == rhs.stippledSmoothLines ); # endif } bool operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeatures; void * pNext = {}; Bool32 rectangularLines = {}; Bool32 bresenhamLines = {}; Bool32 smoothLines = {}; Bool32 stippledRectangularLines = {}; Bool32 stippledBresenhamLines = {}; Bool32 stippledSmoothLines = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLineRasterizationFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceLineRasterizationFeatures; }; using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures; using PhysicalDeviceLineRasterizationFeaturesKHR = PhysicalDeviceLineRasterizationFeatures; // wrapper struct for struct VkPhysicalDeviceLineRasterizationProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLineRasterizationProperties.html struct PhysicalDeviceLineRasterizationProperties { using NativeType = VkPhysicalDeviceLineRasterizationProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT : PhysicalDeviceLineRasterizationProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLineRasterizationProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, lineSubPixelPrecisionBits ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default; #else bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ); # endif } bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceLineRasterizationProperties; void * pNext = {}; uint32_t lineSubPixelPrecisionBits = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLineRasterizationProperties; }; #endif template <> struct 
CppType { using Type = PhysicalDeviceLineRasterizationProperties; }; using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; // wrapper struct for struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceLinearColorAttachmentFeaturesNV.html struct PhysicalDeviceLinearColorAttachmentFeaturesNV { using NativeType = VkPhysicalDeviceLinearColorAttachmentFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( Bool32 linearColorAttachment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , linearColorAttachment{ linearColorAttachment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceLinearColorAttachmentFeaturesNV( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceLinearColorAttachmentFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceLinearColorAttachmentFeaturesNV & operator=( VkPhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setPNext( void * pNext_ ) & 
VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV & setLinearColorAttachment( Bool32 linearColorAttachment_ ) & VULKAN_HPP_NOEXCEPT { linearColorAttachment = linearColorAttachment_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLinearColorAttachmentFeaturesNV && setLinearColorAttachment( Bool32 linearColorAttachment_ ) && VULKAN_HPP_NOEXCEPT { linearColorAttachment = linearColorAttachment_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, linearColorAttachment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceLinearColorAttachmentFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( linearColorAttachment == rhs.linearColorAttachment ); # endif } bool operator!=( PhysicalDeviceLinearColorAttachmentFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceLinearColorAttachmentFeaturesNV; void * pNext = {}; Bool32 linearColorAttachment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceLinearColorAttachmentFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceMaintenance10FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance10FeaturesKHR.html struct PhysicalDeviceMaintenance10FeaturesKHR { using NativeType = VkPhysicalDeviceMaintenance10FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance10FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10FeaturesKHR( Bool32 maintenance10_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance10{ maintenance10_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10FeaturesKHR( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance10FeaturesKHR( VkPhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance10FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance10FeaturesKHR & operator=( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance10FeaturesKHR & operator=( VkPhysicalDeviceMaintenance10FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR & setMaintenance10( Bool32 maintenance10_ ) & VULKAN_HPP_NOEXCEPT { maintenance10 = maintenance10_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance10FeaturesKHR && setMaintenance10( Bool32 maintenance10_ ) && VULKAN_HPP_NOEXCEPT { maintenance10 = maintenance10_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance10FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance10 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance10FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance10 == rhs.maintenance10 ); # endif } bool operator!=( PhysicalDeviceMaintenance10FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance10FeaturesKHR; void * pNext = {}; Bool32 maintenance10 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance10FeaturesKHR; 
}; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance10FeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance10PropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance10PropertiesKHR.html struct PhysicalDeviceMaintenance10PropertiesKHR { using NativeType = VkPhysicalDeviceMaintenance10PropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance10PropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10PropertiesKHR( Bool32 rgba4OpaqueBlackSwizzled_ = {}, Bool32 resolveSrgbFormatAppliesTransferFunction_ = {}, Bool32 resolveSrgbFormatSupportsTransferFunctionControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rgba4OpaqueBlackSwizzled{ rgba4OpaqueBlackSwizzled_ } , resolveSrgbFormatAppliesTransferFunction{ resolveSrgbFormatAppliesTransferFunction_ } , resolveSrgbFormatSupportsTransferFunctionControl{ resolveSrgbFormatSupportsTransferFunctionControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance10PropertiesKHR( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance10PropertiesKHR( VkPhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance10PropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance10PropertiesKHR & operator=( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance10PropertiesKHR & operator=( VkPhysicalDeviceMaintenance10PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance10PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10PropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance10PropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rgba4OpaqueBlackSwizzled, resolveSrgbFormatAppliesTransferFunction, resolveSrgbFormatSupportsTransferFunctionControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance10PropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rgba4OpaqueBlackSwizzled == rhs.rgba4OpaqueBlackSwizzled ) && ( resolveSrgbFormatAppliesTransferFunction == rhs.resolveSrgbFormatAppliesTransferFunction ) && ( resolveSrgbFormatSupportsTransferFunctionControl == rhs.resolveSrgbFormatSupportsTransferFunctionControl ); # endif } bool operator!=( PhysicalDeviceMaintenance10PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance10PropertiesKHR; void * pNext = {}; Bool32 rgba4OpaqueBlackSwizzled = {}; Bool32 resolveSrgbFormatAppliesTransferFunction = {}; Bool32 resolveSrgbFormatSupportsTransferFunctionControl = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance10PropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance10PropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance3Properties, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance3Properties.html struct PhysicalDeviceMaintenance3Properties { using NativeType = VkPhysicalDeviceMaintenance3Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxPerSetDescriptors{ maxPerSetDescriptors_ } , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance3Properties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance3Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance3Properties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance3Properties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return 
std::tie( sType, pNext, maxPerSetDescriptors, maxMemoryAllocationSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance3Properties const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors ) && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); # endif } bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; void * pNext = {}; uint32_t maxPerSetDescriptors = {}; DeviceSize maxMemoryAllocationSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance3Properties; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance3Properties; }; using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; // wrapper struct for struct VkPhysicalDeviceMaintenance4Features, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Features.html struct PhysicalDeviceMaintenance4Features { using NativeType = VkPhysicalDeviceMaintenance4Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Features; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance4{ maintenance4_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( PhysicalDeviceMaintenance4Features 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance4Features( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance4Features( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance4Features & operator=( PhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance4Features & operator=( VkPhysicalDeviceMaintenance4Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features & setMaintenance4( Bool32 maintenance4_ ) & VULKAN_HPP_NOEXCEPT { maintenance4 = maintenance4_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance4Features && setMaintenance4( Bool32 maintenance4_ ) && VULKAN_HPP_NOEXCEPT { maintenance4 = maintenance4_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Features const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Features *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance4 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
PhysicalDeviceMaintenance4Features const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance4 == rhs.maintenance4 ); # endif } bool operator!=( PhysicalDeviceMaintenance4Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance4Features; void * pNext = {}; Bool32 maintenance4 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance4Features; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance4Features; }; using PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features; // wrapper struct for struct VkPhysicalDeviceMaintenance4Properties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance4Properties.html struct PhysicalDeviceMaintenance4Properties { using NativeType = VkPhysicalDeviceMaintenance4Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance4Properties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxBufferSize{ maxBufferSize_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance4Properties( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance4Properties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance4Properties & 
operator=( PhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance4Properties & operator=( VkPhysicalDeviceMaintenance4Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance4Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Properties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance4Properties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxBufferSize ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance4Properties const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxBufferSize == rhs.maxBufferSize ); # endif } bool operator!=( PhysicalDeviceMaintenance4Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance4Properties; void * pNext = {}; DeviceSize maxBufferSize = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance4Properties; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance4Properties; }; using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; // wrapper struct for struct VkPhysicalDeviceMaintenance5Features, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Features.html struct PhysicalDeviceMaintenance5Features { using NativeType = VkPhysicalDeviceMaintenance5Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Features; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance5{ maintenance5_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance5Features( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( Bool32 maintenance5_ ) & VULKAN_HPP_NOEXCEPT { maintenance5 = maintenance5_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features && setMaintenance5( Bool32 maintenance5_ ) && VULKAN_HPP_NOEXCEPT { maintenance5 = 
maintenance5_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Features const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Features *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance5 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance5 == rhs.maintenance5 ); # endif } bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance5Features; void * pNext = {}; Bool32 maintenance5 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance5Features; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance5Features; }; using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; // wrapper struct for struct VkPhysicalDeviceMaintenance5Properties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance5Properties.html struct PhysicalDeviceMaintenance5Properties { using NativeType = VkPhysicalDeviceMaintenance5Properties; static const bool allowDuplicate = false; static 
VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Properties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, Bool32 depthStencilSwizzleOneSupport_ = {}, Bool32 polygonModePointSize_ = {}, Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, Bool32 nonStrictWideLinesUseParallelogram_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } , polygonModePointSize{ polygonModePointSize_ } , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance5Properties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance5Properties & operator=( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Properties &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Properties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance5Properties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, earlyFragmentMultisampleCoverageAfterSampleCounting, earlyFragmentSampleMaskTestBeforeSampleCounting, depthStencilSwizzleOneSupport, polygonModePointSize, nonStrictSinglePixelWideLinesUseParallelogram, nonStrictWideLinesUseParallelogram ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ); # endif } bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance5Properties; void * pNext = {}; Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; Bool32 depthStencilSwizzleOneSupport = {}; 
Bool32 polygonModePointSize = {}; Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; Bool32 nonStrictWideLinesUseParallelogram = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance5Properties; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance5Properties; }; using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; // wrapper struct for struct VkPhysicalDeviceMaintenance6Features, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Features.html struct PhysicalDeviceMaintenance6Features { using NativeType = VkPhysicalDeviceMaintenance6Features; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Features; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance6{ maintenance6_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance6Features( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext 
= pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( Bool32 maintenance6_ ) & VULKAN_HPP_NOEXCEPT { maintenance6 = maintenance6_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features && setMaintenance6( Bool32 maintenance6_ ) && VULKAN_HPP_NOEXCEPT { maintenance6 = maintenance6_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Features const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Features *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance6 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance6 == rhs.maintenance6 ); # endif } bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance6Features; void * pNext = {}; Bool32 maintenance6 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance6Features; }; #endif template <> struct 
CppType { using Type = PhysicalDeviceMaintenance6Features; }; using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; // wrapper struct for struct VkPhysicalDeviceMaintenance6Properties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance6Properties.html struct PhysicalDeviceMaintenance6Properties { using NativeType = VkPhysicalDeviceMaintenance6Properties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Properties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, Bool32 fragmentShadingRateClampCombinerInputs_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance6Properties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Properties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance6Properties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, blockTexelViewCompatibleMultipleLayers, maxCombinedImageSamplerDescriptorCount, fragmentShadingRateClampCombinerInputs ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ); # endif } bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance6Properties; void * pNext = {}; Bool32 blockTexelViewCompatibleMultipleLayers = {}; uint32_t maxCombinedImageSamplerDescriptorCount = {}; Bool32 fragmentShadingRateClampCombinerInputs = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance6Properties; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance6Properties; }; using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; // wrapper struct for struct 
VkPhysicalDeviceMaintenance7FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7FeaturesKHR.html struct PhysicalDeviceMaintenance7FeaturesKHR { using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( Bool32 maintenance7_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance7{ maintenance7_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( Bool32 maintenance7_ ) & VULKAN_HPP_NOEXCEPT { maintenance7 = maintenance7_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceMaintenance7FeaturesKHR && setMaintenance7( Bool32 maintenance7_ ) && VULKAN_HPP_NOEXCEPT { maintenance7 = maintenance7_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance7 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance7 == rhs.maintenance7 ); # endif } bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; void * pNext = {}; Bool32 maintenance7 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance7FeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance7FeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance7PropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance7PropertiesKHR.html struct PhysicalDeviceMaintenance7PropertiesKHR { using NativeType = 
VkPhysicalDeviceMaintenance7PropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( Bool32 robustFragmentShadingRateAttachmentAccess_ = {}, Bool32 separateDepthStencilAttachmentAccess_ = {}, uint32_t maxDescriptorSetTotalUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetTotalStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetTotalBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ } , separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ } , maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ } , maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ } , maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ } , maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ } , maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ } , maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance7PropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, robustFragmentShadingRateAttachmentAccess, separateDepthStencilAttachmentAccess, maxDescriptorSetTotalUniformBuffersDynamic, maxDescriptorSetTotalStorageBuffersDynamic, maxDescriptorSetTotalBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic, maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess ) && ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess ) && ( 
maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic ) && ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic ) && ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic ) && ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ) && ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ) && ( maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); # endif } bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; void * pNext = {}; Bool32 robustFragmentShadingRateAttachmentAccess = {}; Bool32 separateDepthStencilAttachmentAccess = {}; uint32_t maxDescriptorSetTotalUniformBuffersDynamic = {}; uint32_t maxDescriptorSetTotalStorageBuffersDynamic = {}; uint32_t maxDescriptorSetTotalBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {}; uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance7PropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance7PropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance8FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance8FeaturesKHR.html struct PhysicalDeviceMaintenance8FeaturesKHR { using NativeType = VkPhysicalDeviceMaintenance8FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( Bool32 maintenance8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance8{ maintenance8_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance8FeaturesKHR( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance8FeaturesKHR( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance8FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance8FeaturesKHR & operator=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance8FeaturesKHR & operator=( VkPhysicalDeviceMaintenance8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR & setMaintenance8( Bool32 maintenance8_ ) & VULKAN_HPP_NOEXCEPT { maintenance8 = maintenance8_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance8FeaturesKHR && setMaintenance8( Bool32 maintenance8_ ) && VULKAN_HPP_NOEXCEPT { maintenance8 = maintenance8_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance8FeaturesKHR &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance8FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance8FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance8 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance8FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance8 == rhs.maintenance8 ); # endif } bool operator!=( PhysicalDeviceMaintenance8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance8FeaturesKHR; void * pNext = {}; Bool32 maintenance8 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance8FeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance8FeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance9FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9FeaturesKHR.html struct PhysicalDeviceMaintenance9FeaturesKHR { using NativeType = VkPhysicalDeviceMaintenance9FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR( Bool32 maintenance9_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance9{ maintenance9_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9FeaturesKHR( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance9FeaturesKHR( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance9FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance9FeaturesKHR & operator=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance9FeaturesKHR & operator=( VkPhysicalDeviceMaintenance9FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR & setMaintenance9( Bool32 maintenance9_ ) & VULKAN_HPP_NOEXCEPT { maintenance9 = maintenance9_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance9FeaturesKHR && setMaintenance9( Bool32 maintenance9_ ) && VULKAN_HPP_NOEXCEPT { maintenance9 = maintenance9_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMaintenance9FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this 
); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maintenance9 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance9FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance9 == rhs.maintenance9 ); # endif } bool operator!=( PhysicalDeviceMaintenance9FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance9FeaturesKHR; void * pNext = {}; Bool32 maintenance9 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance9FeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance9FeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceMaintenance9PropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMaintenance9PropertiesKHR.html struct PhysicalDeviceMaintenance9PropertiesKHR { using NativeType = VkPhysicalDeviceMaintenance9PropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR( Bool32 image2DViewOf3DSparse_ = {}, DefaultVertexAttributeValueKHR defaultVertexAttributeValue_ = DefaultVertexAttributeValueKHR::eZeroZeroZeroZero, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image2DViewOf3DSparse{ image2DViewOf3DSparse_ } , defaultVertexAttributeValue{ defaultVertexAttributeValue_ } { } 
VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance9PropertiesKHR( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMaintenance9PropertiesKHR( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMaintenance9PropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMaintenance9PropertiesKHR & operator=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMaintenance9PropertiesKHR & operator=( VkPhysicalDeviceMaintenance9PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMaintenance9PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9PropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9PropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMaintenance9PropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, image2DViewOf3DSparse, defaultVertexAttributeValue ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMaintenance9PropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( image2DViewOf3DSparse == rhs.image2DViewOf3DSparse ) && ( defaultVertexAttributeValue == rhs.defaultVertexAttributeValue ); # endif } bool operator!=( PhysicalDeviceMaintenance9PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); 
} #endif public: StructureType sType = StructureType::ePhysicalDeviceMaintenance9PropertiesKHR; void * pNext = {}; Bool32 image2DViewOf3DSparse = {}; DefaultVertexAttributeValueKHR defaultVertexAttributeValue = DefaultVertexAttributeValueKHR::eZeroZeroZeroZero; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMaintenance9PropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceMaintenance9PropertiesKHR; }; // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedFeaturesEXT.html struct PhysicalDeviceMapMemoryPlacedFeaturesEXT { using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( Bool32 memoryMapPlaced_ = {}, Bool32 memoryMapRangePlaced_ = {}, Bool32 memoryUnmapReserve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryMapPlaced{ memoryMapPlaced_ } , memoryMapRangePlaced{ memoryMapRangePlaced_ } , memoryUnmapReserve{ memoryUnmapReserve_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedFeaturesEXT( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMapMemoryPlacedFeaturesEXT( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMapMemoryPlacedFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMapMemoryPlacedFeaturesEXT & operator=( 
VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapPlaced( Bool32 memoryMapPlaced_ ) & VULKAN_HPP_NOEXCEPT { memoryMapPlaced = memoryMapPlaced_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT && setMemoryMapPlaced( Bool32 memoryMapPlaced_ ) && VULKAN_HPP_NOEXCEPT { memoryMapPlaced = memoryMapPlaced_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryMapRangePlaced( Bool32 memoryMapRangePlaced_ ) & VULKAN_HPP_NOEXCEPT { memoryMapRangePlaced = memoryMapRangePlaced_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT && setMemoryMapRangePlaced( Bool32 memoryMapRangePlaced_ ) && VULKAN_HPP_NOEXCEPT { memoryMapRangePlaced = memoryMapRangePlaced_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT & setMemoryUnmapReserve( Bool32 memoryUnmapReserve_ ) & VULKAN_HPP_NOEXCEPT { memoryUnmapReserve = memoryUnmapReserve_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMapMemoryPlacedFeaturesEXT && setMemoryUnmapReserve( Bool32 memoryUnmapReserve_ ) && VULKAN_HPP_NOEXCEPT { memoryUnmapReserve = memoryUnmapReserve_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryMapPlaced, memoryMapRangePlaced, memoryUnmapReserve ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryMapPlaced == rhs.memoryMapPlaced ) && ( memoryMapRangePlaced == rhs.memoryMapRangePlaced ) && ( memoryUnmapReserve == rhs.memoryUnmapReserve ); # endif } bool operator!=( PhysicalDeviceMapMemoryPlacedFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT; void * pNext = {}; Bool32 memoryMapPlaced = {}; Bool32 memoryMapRangePlaced = {}; Bool32 memoryUnmapReserve = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMapMemoryPlacedFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMapMemoryPlacedPropertiesEXT.html struct PhysicalDeviceMapMemoryPlacedPropertiesEXT { using NativeType = VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( DeviceSize minPlacedMemoryMapAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minPlacedMemoryMapAlignment{ minPlacedMemoryMapAlignment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMapMemoryPlacedPropertiesEXT( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMapMemoryPlacedPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMapMemoryPlacedPropertiesEXT & operator=( VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMapMemoryPlacedPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minPlacedMemoryMapAlignment ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minPlacedMemoryMapAlignment == rhs.minPlacedMemoryMapAlignment ); # endif } bool operator!=( PhysicalDeviceMapMemoryPlacedPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT; void * pNext = {}; DeviceSize minPlacedMemoryMapAlignment = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMapMemoryPlacedPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceMemoryBudgetPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html struct PhysicalDeviceMemoryBudgetPropertiesEXT { using NativeType = VkPhysicalDeviceMemoryBudgetPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, std::array const & heapUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , heapBudget{ heapBudget_ } , heapUsage{ heapUsage_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( 
PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryBudgetPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, heapBudget, heapUsage ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( heapBudget == rhs.heapBudget ) && ( heapUsage == rhs.heapUsage ); # endif } bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT; void * pNext = {}; ArrayWrapper1D heapBudget = {}; ArrayWrapper1D heapUsage = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMemoryBudgetPropertiesEXT; }; // wrapper struct for struct 
VkPhysicalDeviceMemoryDecompressionFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionFeaturesEXT.html struct PhysicalDeviceMemoryDecompressionFeaturesEXT { using NativeType = VkPhysicalDeviceMemoryDecompressionFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesEXT( Bool32 memoryDecompression_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryDecompression{ memoryDecompression_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesEXT( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryDecompressionFeaturesEXT( VkPhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryDecompressionFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryDecompressionFeaturesEXT & operator=( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryDecompressionFeaturesEXT & operator=( VkPhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT & 
setMemoryDecompression( Bool32 memoryDecompression_ ) & VULKAN_HPP_NOEXCEPT { memoryDecompression = memoryDecompression_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryDecompressionFeaturesEXT && setMemoryDecompression( Bool32 memoryDecompression_ ) && VULKAN_HPP_NOEXCEPT { memoryDecompression = memoryDecompression_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryDecompressionFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryDecompression ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMemoryDecompressionFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryDecompression == rhs.memoryDecompression ); # endif } bool operator!=( PhysicalDeviceMemoryDecompressionFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionFeaturesEXT; void * pNext = {}; Bool32 memoryDecompression = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryDecompressionFeaturesEXT; }; #endif template <> struct CppType { using Type = 
PhysicalDeviceMemoryDecompressionFeaturesEXT; }; using PhysicalDeviceMemoryDecompressionFeaturesNV = PhysicalDeviceMemoryDecompressionFeaturesEXT; // wrapper struct for struct VkPhysicalDeviceMemoryDecompressionPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryDecompressionPropertiesEXT.html struct PhysicalDeviceMemoryDecompressionPropertiesEXT { using NativeType = VkPhysicalDeviceMemoryDecompressionPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesEXT( MemoryDecompressionMethodFlagsEXT decompressionMethods_ = {}, uint64_t maxDecompressionIndirectCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , decompressionMethods{ decompressionMethods_ } , maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesEXT( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryDecompressionPropertiesEXT( VkPhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryDecompressionPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryDecompressionPropertiesEXT & operator=( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryDecompressionPropertiesEXT & operator=( VkPhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceMemoryDecompressionPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryDecompressionPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, decompressionMethods, maxDecompressionIndirectCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMemoryDecompressionPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( decompressionMethods == rhs.decompressionMethods ) && ( maxDecompressionIndirectCount == rhs.maxDecompressionIndirectCount ); # endif } bool operator!=( PhysicalDeviceMemoryDecompressionPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMemoryDecompressionPropertiesEXT; void * pNext = {}; MemoryDecompressionMethodFlagsEXT decompressionMethods = {}; uint64_t maxDecompressionIndirectCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryDecompressionPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMemoryDecompressionPropertiesEXT; }; using PhysicalDeviceMemoryDecompressionPropertiesNV = PhysicalDeviceMemoryDecompressionPropertiesEXT; // wrapper struct for struct VkPhysicalDeviceMemoryPriorityFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html struct 
PhysicalDeviceMemoryPriorityFeaturesEXT { using NativeType = VkPhysicalDeviceMemoryPriorityFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( Bool32 memoryPriority_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryPriority{ memoryPriority_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( Bool32 memoryPriority_ ) & VULKAN_HPP_NOEXCEPT { memoryPriority = memoryPriority_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT && setMemoryPriority( Bool32 memoryPriority_ ) && VULKAN_HPP_NOEXCEPT { memoryPriority = 
memoryPriority_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryPriorityFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryPriority ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryPriority == rhs.memoryPriority ); # endif } bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT; void * pNext = {}; Bool32 memoryPriority = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMemoryPriorityFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceMemoryProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties.html struct PhysicalDeviceMemoryProperties { using NativeType = VkPhysicalDeviceMemoryProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {}, std::array const & memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT : memoryTypeCount{ memoryTypeCount_ } , memoryTypes{ memoryTypes_ } , memoryHeapCount{ memoryHeapCount_ } , memoryHeaps{ memoryHeaps_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, ArrayWrapper1D const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( memoryTypeCount, memoryTypes, memoryHeapCount, memoryHeaps ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) std::strong_ordering operator<=>( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = memoryTypeCount <=> rhs.memoryTypeCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < memoryTypeCount; ++i ) { if ( auto cmp = memoryTypes[i] <=> rhs.memoryTypes[i]; cmp != 0 ) return cmp; } if ( auto cmp = 
memoryHeapCount <=> rhs.memoryHeapCount; cmp != 0 ) return cmp; for ( size_t i = 0; i < memoryHeapCount; ++i ) { if ( auto cmp = memoryHeaps[i] <=> rhs.memoryHeaps[i]; cmp != 0 ) return cmp; } return std::strong_ordering::equivalent; } #endif bool operator==( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( memoryTypeCount == rhs.memoryTypeCount ) && ( memcmp( memoryTypes, rhs.memoryTypes, memoryTypeCount * sizeof( MemoryType ) ) == 0 ) && ( memoryHeapCount == rhs.memoryHeapCount ) && ( memcmp( memoryHeaps, rhs.memoryHeaps, memoryHeapCount * sizeof( MemoryHeap ) ) == 0 ); } bool operator!=( PhysicalDeviceMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: uint32_t memoryTypeCount = {}; ArrayWrapper1D memoryTypes = {}; uint32_t memoryHeapCount = {}; ArrayWrapper1D memoryHeaps = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryProperties; }; #endif // wrapper struct for struct VkPhysicalDeviceMemoryProperties2, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMemoryProperties2.html struct PhysicalDeviceMemoryProperties2 { using NativeType = VkPhysicalDeviceMemoryProperties2; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties memoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , memoryProperties{ memoryProperties_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMemoryProperties2( 
*reinterpret_cast( &rhs ) ) { } PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMemoryProperties2 const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties2 const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMemoryProperties2 *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, memoryProperties ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMemoryProperties2 const & ) const = default; #else bool operator==( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memoryProperties == rhs.memoryProperties ); # endif } bool operator!=( PhysicalDeviceMemoryProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2; void * pNext = {}; PhysicalDeviceMemoryProperties memoryProperties = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMemoryProperties2; }; #endif template <> struct CppType { using Type = PhysicalDeviceMemoryProperties2; }; using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesEXT, see 
// https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesEXT.html struct PhysicalDeviceMeshShaderFeaturesEXT { using NativeType = VkPhysicalDeviceMeshShaderFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( Bool32 taskShader_ = {}, Bool32 meshShader_ = {}, Bool32 multiviewMeshShader_ = {}, Bool32 primitiveFragmentShadingRateMeshShader_ = {}, Bool32 meshShaderQueries_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , taskShader{ taskShader_ } , meshShader{ meshShader_ } , multiviewMeshShader{ multiviewMeshShader_ } , primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ } , meshShaderQueries{ meshShaderQueries_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesEXT( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderFeaturesEXT( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMeshShaderFeaturesEXT & operator=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMeshShaderFeaturesEXT & operator=( VkPhysicalDeviceMeshShaderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setTaskShader( Bool32 taskShader_ ) & VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setTaskShader( Bool32 taskShader_ ) && VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShader( Bool32 meshShader_ ) & VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setMeshShader( Bool32 meshShader_ ) && VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMultiviewMeshShader( Bool32 multiviewMeshShader_ ) & VULKAN_HPP_NOEXCEPT { multiviewMeshShader = multiviewMeshShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setMultiviewMeshShader( Bool32 multiviewMeshShader_ ) && VULKAN_HPP_NOEXCEPT { multiviewMeshShader = multiviewMeshShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setPrimitiveFragmentShadingRateMeshShader( Bool32 primitiveFragmentShadingRateMeshShader_ ) & VULKAN_HPP_NOEXCEPT { primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setPrimitiveFragmentShadingRateMeshShader( Bool32 primitiveFragmentShadingRateMeshShader_ ) && VULKAN_HPP_NOEXCEPT { primitiveFragmentShadingRateMeshShader = primitiveFragmentShadingRateMeshShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT & setMeshShaderQueries( Bool32 meshShaderQueries_ ) & VULKAN_HPP_NOEXCEPT { meshShaderQueries = meshShaderQueries_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesEXT && setMeshShaderQueries( Bool32 
meshShaderQueries_ ) && VULKAN_HPP_NOEXCEPT { meshShaderQueries = meshShaderQueries_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMeshShaderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, taskShader, meshShader, multiviewMeshShader, primitiveFragmentShadingRateMeshShader, meshShaderQueries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMeshShaderFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ) && ( multiviewMeshShader == rhs.multiviewMeshShader ) && ( primitiveFragmentShadingRateMeshShader == rhs.primitiveFragmentShadingRateMeshShader ) && ( meshShaderQueries == rhs.meshShaderQueries ); # endif } bool operator!=( PhysicalDeviceMeshShaderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesEXT; void * pNext = {}; Bool32 taskShader = {}; Bool32 meshShader = {}; Bool32 multiviewMeshShader = {}; Bool32 primitiveFragmentShadingRateMeshShader = {}; Bool32 meshShaderQueries = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PhysicalDeviceMeshShaderFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMeshShaderFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceMeshShaderFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html struct PhysicalDeviceMeshShaderFeaturesNV { using NativeType = VkPhysicalDeviceMeshShaderFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( Bool32 taskShader_ = {}, Bool32 meshShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , taskShader{ taskShader_ } , meshShader{ meshShader_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( 
Bool32 taskShader_ ) & VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV && setTaskShader( Bool32 taskShader_ ) && VULKAN_HPP_NOEXCEPT { taskShader = taskShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( Bool32 meshShader_ ) & VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV && setMeshShader( Bool32 meshShader_ ) && VULKAN_HPP_NOEXCEPT { meshShader = meshShader_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMeshShaderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, taskShader, meshShader ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( taskShader == rhs.taskShader ) && ( meshShader == rhs.meshShader ); # endif } bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV; void * pNext = {}; Bool32 taskShader = {}; Bool32 meshShader = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMeshShaderFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceMeshShaderFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesEXT.html struct PhysicalDeviceMeshShaderPropertiesEXT { using NativeType = VkPhysicalDeviceMeshShaderPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( uint32_t maxTaskWorkGroupTotalCount_ = {}, std::array const & maxTaskWorkGroupCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskPayloadSize_ = {}, uint32_t maxTaskSharedMemorySize_ = {}, uint32_t maxTaskPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshWorkGroupTotalCount_ = {}, std::array const & maxMeshWorkGroupCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshSharedMemorySize_ = {}, uint32_t maxMeshPayloadAndSharedMemorySize_ = {}, uint32_t maxMeshOutputMemorySize_ = {}, uint32_t maxMeshPayloadAndOutputMemorySize_ = {}, uint32_t maxMeshOutputComponents_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshOutputLayers_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, uint32_t maxPreferredTaskWorkGroupInvocations_ = {}, uint32_t maxPreferredMeshWorkGroupInvocations_ = {}, Bool32 prefersLocalInvocationVertexOutput_ = {}, Bool32 prefersLocalInvocationPrimitiveOutput_ = {}, Bool32 
prefersCompactVertexOutput_ = {}, Bool32 prefersCompactPrimitiveOutput_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ } , maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ } , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } , maxTaskPayloadSize{ maxTaskPayloadSize_ } , maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ } , maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ } , maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ } , maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ } , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } , maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ } , maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ } , maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ } , maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ } , maxMeshOutputComponents{ maxMeshOutputComponents_ } , maxMeshOutputVertices{ maxMeshOutputVertices_ } , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } , maxMeshOutputLayers{ maxMeshOutputLayers_ } , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } , maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ } , maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ } , prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ } , prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ } , prefersCompactVertexOutput{ prefersCompactVertexOutput_ } , prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesEXT( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; PhysicalDeviceMeshShaderPropertiesEXT( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMeshShaderPropertiesEXT & operator=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMeshShaderPropertiesEXT & operator=( VkPhysicalDeviceMeshShaderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMeshShaderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, ArrayWrapper1D const &, uint32_t const &, ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, Bool32 const &, Bool32 const &, Bool32 const &, Bool32 const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxTaskWorkGroupTotalCount, maxTaskWorkGroupCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskPayloadSize, maxTaskSharedMemorySize, maxTaskPayloadAndSharedMemorySize, maxMeshWorkGroupTotalCount, maxMeshWorkGroupCount, maxMeshWorkGroupInvocations, maxMeshWorkGroupSize, maxMeshSharedMemorySize, maxMeshPayloadAndSharedMemorySize, maxMeshOutputMemorySize, 
maxMeshPayloadAndOutputMemorySize, maxMeshOutputComponents, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshOutputLayers, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity, maxPreferredTaskWorkGroupInvocations, maxPreferredMeshWorkGroupInvocations, prefersLocalInvocationVertexOutput, prefersLocalInvocationPrimitiveOutput, prefersCompactVertexOutput, prefersCompactPrimitiveOutput ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMeshShaderPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxTaskWorkGroupTotalCount == rhs.maxTaskWorkGroupTotalCount ) && ( maxTaskWorkGroupCount == rhs.maxTaskWorkGroupCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskPayloadSize == rhs.maxTaskPayloadSize ) && ( maxTaskSharedMemorySize == rhs.maxTaskSharedMemorySize ) && ( maxTaskPayloadAndSharedMemorySize == rhs.maxTaskPayloadAndSharedMemorySize ) && ( maxMeshWorkGroupTotalCount == rhs.maxMeshWorkGroupTotalCount ) && ( maxMeshWorkGroupCount == rhs.maxMeshWorkGroupCount ) && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && ( maxMeshSharedMemorySize == rhs.maxMeshSharedMemorySize ) && ( maxMeshPayloadAndSharedMemorySize == rhs.maxMeshPayloadAndSharedMemorySize ) && ( maxMeshOutputMemorySize == rhs.maxMeshOutputMemorySize ) && ( maxMeshPayloadAndOutputMemorySize == rhs.maxMeshPayloadAndOutputMemorySize ) && ( maxMeshOutputComponents == rhs.maxMeshOutputComponents ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( 
maxMeshOutputLayers == rhs.maxMeshOutputLayers ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ) && ( maxPreferredTaskWorkGroupInvocations == rhs.maxPreferredTaskWorkGroupInvocations ) && ( maxPreferredMeshWorkGroupInvocations == rhs.maxPreferredMeshWorkGroupInvocations ) && ( prefersLocalInvocationVertexOutput == rhs.prefersLocalInvocationVertexOutput ) && ( prefersLocalInvocationPrimitiveOutput == rhs.prefersLocalInvocationPrimitiveOutput ) && ( prefersCompactVertexOutput == rhs.prefersCompactVertexOutput ) && ( prefersCompactPrimitiveOutput == rhs.prefersCompactPrimitiveOutput ); # endif } bool operator!=( PhysicalDeviceMeshShaderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesEXT; void * pNext = {}; uint32_t maxTaskWorkGroupTotalCount = {}; ArrayWrapper1D maxTaskWorkGroupCount = {}; uint32_t maxTaskWorkGroupInvocations = {}; ArrayWrapper1D maxTaskWorkGroupSize = {}; uint32_t maxTaskPayloadSize = {}; uint32_t maxTaskSharedMemorySize = {}; uint32_t maxTaskPayloadAndSharedMemorySize = {}; uint32_t maxMeshWorkGroupTotalCount = {}; ArrayWrapper1D maxMeshWorkGroupCount = {}; uint32_t maxMeshWorkGroupInvocations = {}; ArrayWrapper1D maxMeshWorkGroupSize = {}; uint32_t maxMeshSharedMemorySize = {}; uint32_t maxMeshPayloadAndSharedMemorySize = {}; uint32_t maxMeshOutputMemorySize = {}; uint32_t maxMeshPayloadAndOutputMemorySize = {}; uint32_t maxMeshOutputComponents = {}; uint32_t maxMeshOutputVertices = {}; uint32_t maxMeshOutputPrimitives = {}; uint32_t maxMeshOutputLayers = {}; uint32_t maxMeshMultiviewViewCount = {}; uint32_t meshOutputPerVertexGranularity = {}; uint32_t meshOutputPerPrimitiveGranularity = {}; uint32_t maxPreferredTaskWorkGroupInvocations = {}; uint32_t 
maxPreferredMeshWorkGroupInvocations = {}; Bool32 prefersLocalInvocationVertexOutput = {}; Bool32 prefersLocalInvocationPrimitiveOutput = {}; Bool32 prefersCompactVertexOutput = {}; Bool32 prefersCompactPrimitiveOutput = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMeshShaderPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMeshShaderPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceMeshShaderPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html struct PhysicalDeviceMeshShaderPropertiesNV { using NativeType = VkPhysicalDeviceMeshShaderPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array const & maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array const & maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ } , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } , maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ } , maxTaskOutputCount{ maxTaskOutputCount_ } , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } , maxMeshWorkGroupSize{ 
maxMeshWorkGroupSize_ } , maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ } , maxMeshOutputVertices{ maxMeshOutputVertices_ } , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } { } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMeshShaderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMeshShaderPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple const &, uint32_t const &, uint32_t const &, uint32_t const &, ArrayWrapper1D const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &, uint32_t const &> reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxDrawMeshTasksCount, maxTaskWorkGroupInvocations, maxTaskWorkGroupSize, maxTaskTotalMemorySize, maxTaskOutputCount, maxMeshWorkGroupInvocations, 
maxMeshWorkGroupSize, maxMeshTotalMemorySize, maxMeshOutputVertices, maxMeshOutputPrimitives, maxMeshMultiviewViewCount, meshOutputPerVertexGranularity, meshOutputPerPrimitiveGranularity ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount ) && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations ) && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize ) && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize ) && ( maxTaskOutputCount == rhs.maxTaskOutputCount ) && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations ) && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize ) && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize ) && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices ) && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives ) && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount ) && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity ) && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity ); # endif } bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV; void * pNext = {}; uint32_t maxDrawMeshTasksCount = {}; uint32_t maxTaskWorkGroupInvocations = {}; ArrayWrapper1D maxTaskWorkGroupSize = {}; uint32_t maxTaskTotalMemorySize = {}; uint32_t maxTaskOutputCount = {}; uint32_t maxMeshWorkGroupInvocations = {}; ArrayWrapper1D maxMeshWorkGroupSize = {}; uint32_t maxMeshTotalMemorySize = {}; uint32_t maxMeshOutputVertices = 
{}; uint32_t maxMeshOutputPrimitives = {}; uint32_t maxMeshMultiviewViewCount = {}; uint32_t meshOutputPerVertexGranularity = {}; uint32_t meshOutputPerPrimitiveGranularity = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMeshShaderPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceMeshShaderPropertiesNV; }; // wrapper struct for struct VkPhysicalDeviceMultiDrawFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawFeaturesEXT.html struct PhysicalDeviceMultiDrawFeaturesEXT { using NativeType = VkPhysicalDeviceMultiDrawFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multiDraw{ multiDraw_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiDrawFeaturesEXT( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiDrawFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiDrawFeaturesEXT & operator=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiDrawFeaturesEXT & operator=( VkPhysicalDeviceMultiDrawFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceMultiDrawFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT & setMultiDraw( Bool32 multiDraw_ ) & VULKAN_HPP_NOEXCEPT { multiDraw = multiDraw_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiDrawFeaturesEXT && setMultiDraw( Bool32 multiDraw_ ) && VULKAN_HPP_NOEXCEPT { multiDraw = multiDraw_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multiDraw ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiDrawFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiDraw == rhs.multiDraw ); # endif } bool operator!=( PhysicalDeviceMultiDrawFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiDrawFeaturesEXT; void * pNext = {}; Bool32 multiDraw = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiDrawFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiDrawFeaturesEXT; }; // wrapper struct for struct 
VkPhysicalDeviceMultiDrawPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiDrawPropertiesEXT.html struct PhysicalDeviceMultiDrawPropertiesEXT { using NativeType = VkPhysicalDeviceMultiDrawPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxMultiDrawCount{ maxMultiDrawCount_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiDrawPropertiesEXT( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiDrawPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiDrawPropertiesEXT & operator=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiDrawPropertiesEXT & operator=( VkPhysicalDeviceMultiDrawPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMultiDrawPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiDrawPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxMultiDrawCount ); } #endif #if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiDrawPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiDrawCount == rhs.maxMultiDrawCount ); # endif } bool operator!=( PhysicalDeviceMultiDrawPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiDrawPropertiesEXT; void * pNext = {}; uint32_t maxMultiDrawCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiDrawPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiDrawPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.html struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { using NativeType = VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( Bool32 multisampledRenderToSingleSampled_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & operator=( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT & setMultisampledRenderToSingleSampled( Bool32 multisampledRenderToSingleSampled_ ) & VULKAN_HPP_NOEXCEPT { multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT && setMultisampledRenderToSingleSampled( Bool32 multisampledRenderToSingleSampled_ ) && VULKAN_HPP_NOEXCEPT { multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return 
*reinterpret_cast( this ); } operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multisampledRenderToSingleSampled ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multisampledRenderToSingleSampled == rhs.multisampledRenderToSingleSampled ); # endif } bool operator!=( PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; void * pNext = {}; Bool32 multisampledRenderToSingleSampled = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceMultiviewFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewFeatures.html struct PhysicalDeviceMultiviewFeatures { using NativeType = VkPhysicalDeviceMultiviewFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( Bool32 multiview_ = {}, Bool32 multiviewGeometryShader_ = {}, Bool32 multiviewTessellationShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multiview{ multiview_ } , multiviewGeometryShader{ multiviewGeometryShader_ } , multiviewTessellationShader{ multiviewTessellationShader_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiview( Bool32 multiview_ ) & VULKAN_HPP_NOEXCEPT { multiview = multiview_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures && setMultiview( Bool32 multiview_ ) && VULKAN_HPP_NOEXCEPT { multiview = multiview_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) & VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures && setMultiviewGeometryShader( Bool32 multiviewGeometryShader_ ) && VULKAN_HPP_NOEXCEPT { multiviewGeometryShader = multiviewGeometryShader_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) & VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures && setMultiviewTessellationShader( Bool32 multiviewTessellationShader_ ) && VULKAN_HPP_NOEXCEPT { multiviewTessellationShader = multiviewTessellationShader_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multiview, multiviewGeometryShader, multiviewTessellationShader ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiviewFeatures const & ) const = default; #else bool operator==( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiview == rhs.multiview ) && ( multiviewGeometryShader == rhs.multiviewGeometryShader ) && ( multiviewTessellationShader == rhs.multiviewTessellationShader ); # endif } bool operator!=( PhysicalDeviceMultiviewFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT 
{ return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures; void * pNext = {}; Bool32 multiview = {}; Bool32 multiviewGeometryShader = {}; Bool32 multiviewTessellationShader = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiviewFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiviewFeatures; }; using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { using NativeType = VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( Bool32 perViewPositionAllComponents_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , perViewPositionAllComponents{ perViewPositionAllComponents_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ 
PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, perViewPositionAllComponents ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & ) const = default; #else bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents ); # endif } bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; void * pNext = {}; Bool32 perViewPositionAllComponents = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; }; // wrapper struct for struct 
VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM.html struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM { using NativeType = VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( Bool32 multiviewPerViewRenderAreas_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM && setPNext( 
void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM & setMultiviewPerViewRenderAreas( Bool32 multiviewPerViewRenderAreas_ ) & VULKAN_HPP_NOEXCEPT { multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM && setMultiviewPerViewRenderAreas( Bool32 multiviewPerViewRenderAreas_ ) && VULKAN_HPP_NOEXCEPT { multiviewPerViewRenderAreas = multiviewPerViewRenderAreas_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multiviewPerViewRenderAreas ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewRenderAreas == rhs.multiviewPerViewRenderAreas ); # endif } bool operator!=( PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = 
StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; void * pNext = {}; Bool32 multiviewPerViewRenderAreas = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM.html struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { using NativeType = VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( Bool32 multiviewPerViewViewports_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , multiviewPerViewViewports{ multiviewPerViewViewports_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & operator=( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM & setMultiviewPerViewViewports( Bool32 multiviewPerViewViewports_ ) & VULKAN_HPP_NOEXCEPT { multiviewPerViewViewports = multiviewPerViewViewports_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM && setMultiviewPerViewViewports( Bool32 multiviewPerViewViewports_ ) && VULKAN_HPP_NOEXCEPT { multiviewPerViewViewports = multiviewPerViewViewports_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, multiviewPerViewViewports ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == 
rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( multiviewPerViewViewports == rhs.multiviewPerViewViewports ); # endif } bool operator!=( PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; void * pNext = {}; Bool32 multiviewPerViewViewports = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; }; // wrapper struct for struct VkPhysicalDeviceMultiviewProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMultiviewProperties.html struct PhysicalDeviceMultiviewProperties { using NativeType = VkPhysicalDeviceMultiviewProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxMultiviewViewCount{ maxMultiviewViewCount_ } , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMultiviewProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceMultiviewProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMultiviewProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxMultiviewViewCount, maxMultiviewInstanceIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMultiviewProperties const & ) const = default; #else bool operator==( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount ) && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex ); # endif } bool operator!=( PhysicalDeviceMultiviewProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties; void * pNext = {}; uint32_t maxMultiviewViewCount = {}; uint32_t maxMultiviewInstanceIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMultiviewProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceMultiviewProperties; }; using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; // wrapper struct for struct 
VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT.html struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT { using NativeType = VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( Bool32 mutableDescriptorType_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , mutableDescriptorType{ mutableDescriptorType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceMutableDescriptorTypeFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceMutableDescriptorTypeFeaturesEXT & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceMutableDescriptorTypeFeaturesEXT & setMutableDescriptorType( Bool32 mutableDescriptorType_ ) & VULKAN_HPP_NOEXCEPT { mutableDescriptorType = mutableDescriptorType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesEXT && setMutableDescriptorType( Bool32 mutableDescriptorType_ ) && VULKAN_HPP_NOEXCEPT { mutableDescriptorType = mutableDescriptorType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, mutableDescriptorType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mutableDescriptorType == rhs.mutableDescriptorType ); # endif } bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT; void * pNext = {}; Bool32 mutableDescriptorType = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; }; 
#endif template <> struct CppType { using Type = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; }; using PhysicalDeviceMutableDescriptorTypeFeaturesVALVE = PhysicalDeviceMutableDescriptorTypeFeaturesEXT; // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferFeaturesEXT.html struct PhysicalDeviceNestedCommandBufferFeaturesEXT { using NativeType = VkPhysicalDeviceNestedCommandBufferFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( Bool32 nestedCommandBuffer_ = {}, Bool32 nestedCommandBufferRendering_ = {}, Bool32 nestedCommandBufferSimultaneousUse_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , nestedCommandBuffer{ nestedCommandBuffer_ } , nestedCommandBufferRendering{ nestedCommandBufferRendering_ } , nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferFeaturesEXT( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceNestedCommandBufferFeaturesEXT( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceNestedCommandBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceNestedCommandBufferFeaturesEXT & operator=( VkPhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBuffer( Bool32 nestedCommandBuffer_ ) & VULKAN_HPP_NOEXCEPT { nestedCommandBuffer = nestedCommandBuffer_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT && setNestedCommandBuffer( Bool32 nestedCommandBuffer_ ) && VULKAN_HPP_NOEXCEPT { nestedCommandBuffer = nestedCommandBuffer_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBufferRendering( Bool32 nestedCommandBufferRendering_ ) & VULKAN_HPP_NOEXCEPT { nestedCommandBufferRendering = nestedCommandBufferRendering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT && setNestedCommandBufferRendering( Bool32 nestedCommandBufferRendering_ ) && VULKAN_HPP_NOEXCEPT { nestedCommandBufferRendering = nestedCommandBufferRendering_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT & setNestedCommandBufferSimultaneousUse( Bool32 nestedCommandBufferSimultaneousUse_ ) & VULKAN_HPP_NOEXCEPT { nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferFeaturesEXT && setNestedCommandBufferSimultaneousUse( Bool32 nestedCommandBufferSimultaneousUse_ ) && VULKAN_HPP_NOEXCEPT { nestedCommandBufferSimultaneousUse = nestedCommandBufferSimultaneousUse_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nestedCommandBuffer == rhs.nestedCommandBuffer ) && ( nestedCommandBufferRendering == rhs.nestedCommandBufferRendering ) && ( nestedCommandBufferSimultaneousUse == rhs.nestedCommandBufferSimultaneousUse ); # endif } bool operator!=( PhysicalDeviceNestedCommandBufferFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferFeaturesEXT; void * pNext = {}; Bool32 nestedCommandBuffer = {}; Bool32 nestedCommandBufferRendering = {}; Bool32 nestedCommandBufferSimultaneousUse = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceNestedCommandBufferFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNestedCommandBufferPropertiesEXT.html struct PhysicalDeviceNestedCommandBufferPropertiesEXT { using NativeType = VkPhysicalDeviceNestedCommandBufferPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceNestedCommandBufferPropertiesEXT( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceNestedCommandBufferPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceNestedCommandBufferPropertiesEXT & operator=( VkPhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceNestedCommandBufferPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( 
this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxCommandBufferNestingLevel ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceNestedCommandBufferPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxCommandBufferNestingLevel == rhs.maxCommandBufferNestingLevel ); # endif } bool operator!=( PhysicalDeviceNestedCommandBufferPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceNestedCommandBufferPropertiesEXT; void * pNext = {}; uint32_t maxCommandBufferNestingLevel = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceNestedCommandBufferPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT.html struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT { using NativeType = VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( Bool32 nonSeamlessCubeMap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , nonSeamlessCubeMap{ nonSeamlessCubeMap_ } { } VULKAN_HPP_CONSTEXPR 
PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & operator=( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT & setNonSeamlessCubeMap( Bool32 nonSeamlessCubeMap_ ) & VULKAN_HPP_NOEXCEPT { nonSeamlessCubeMap = nonSeamlessCubeMap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNonSeamlessCubeMapFeaturesEXT && setNonSeamlessCubeMap( Bool32 nonSeamlessCubeMap_ ) && VULKAN_HPP_NOEXCEPT { nonSeamlessCubeMap = nonSeamlessCubeMap_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT 
*() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, nonSeamlessCubeMap ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nonSeamlessCubeMap == rhs.nonSeamlessCubeMap ); # endif } bool operator!=( PhysicalDeviceNonSeamlessCubeMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT; void * pNext = {}; Bool32 nonSeamlessCubeMap = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceOpacityMicromapFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapFeaturesEXT.html struct PhysicalDeviceOpacityMicromapFeaturesEXT { using NativeType = VkPhysicalDeviceOpacityMicromapFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( Bool32 micromap_ = {}, Bool32 micromapCaptureReplay_ = {}, Bool32 micromapHostCommands_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , micromap{ micromap_ } , micromapCaptureReplay{ 
micromapCaptureReplay_ } , micromapHostCommands{ micromapHostCommands_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapFeaturesEXT( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceOpacityMicromapFeaturesEXT( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceOpacityMicromapFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceOpacityMicromapFeaturesEXT & operator=( VkPhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromap( Bool32 micromap_ ) & VULKAN_HPP_NOEXCEPT { micromap = micromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT && setMicromap( Bool32 micromap_ ) && VULKAN_HPP_NOEXCEPT { micromap = micromap_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapCaptureReplay( Bool32 micromapCaptureReplay_ ) & VULKAN_HPP_NOEXCEPT { micromapCaptureReplay = micromapCaptureReplay_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT && setMicromapCaptureReplay( Bool32 micromapCaptureReplay_ ) && VULKAN_HPP_NOEXCEPT { micromapCaptureReplay = micromapCaptureReplay_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceOpacityMicromapFeaturesEXT & setMicromapHostCommands( Bool32 micromapHostCommands_ ) & VULKAN_HPP_NOEXCEPT { micromapHostCommands = micromapHostCommands_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpacityMicromapFeaturesEXT && setMicromapHostCommands( Bool32 micromapHostCommands_ ) && VULKAN_HPP_NOEXCEPT { micromapHostCommands = micromapHostCommands_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpacityMicromapFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpacityMicromapFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceOpacityMicromapFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, micromap, micromapCaptureReplay, micromapHostCommands ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceOpacityMicromapFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( micromap == rhs.micromap ) && ( micromapCaptureReplay == rhs.micromapCaptureReplay ) && ( micromapHostCommands == rhs.micromapHostCommands ); # endif } bool operator!=( PhysicalDeviceOpacityMicromapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapFeaturesEXT; void * pNext = {}; Bool32 micromap = {}; Bool32 micromapCaptureReplay = {}; Bool32 micromapHostCommands = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceOpacityMicromapFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceOpacityMicromapFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceOpacityMicromapPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpacityMicromapPropertiesEXT.html struct PhysicalDeviceOpacityMicromapPropertiesEXT { using NativeType = VkPhysicalDeviceOpacityMicromapPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {}, uint32_t maxOpacity4StateSubdivisionLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ } , maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceOpacityMicromapPropertiesEXT( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceOpacityMicromapPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceOpacityMicromapPropertiesEXT & operator=( VkPhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } 
operator VkPhysicalDeviceOpacityMicromapPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpacityMicromapPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceOpacityMicromapPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxOpacity2StateSubdivisionLevel, maxOpacity4StateSubdivisionLevel ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceOpacityMicromapPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxOpacity2StateSubdivisionLevel == rhs.maxOpacity2StateSubdivisionLevel ) && ( maxOpacity4StateSubdivisionLevel == rhs.maxOpacity4StateSubdivisionLevel ); # endif } bool operator!=( PhysicalDeviceOpacityMicromapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceOpacityMicromapPropertiesEXT; void * pNext = {}; uint32_t maxOpacity2StateSubdivisionLevel = {}; uint32_t maxOpacity4StateSubdivisionLevel = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceOpacityMicromapPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceOpacityMicromapPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceOpticalFlowFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowFeaturesNV.html struct PhysicalDeviceOpticalFlowFeaturesNV { using NativeType = VkPhysicalDeviceOpticalFlowFeaturesNV; static const bool allowDuplicate = false; 
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , opticalFlow{ opticalFlow_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceOpticalFlowFeaturesNV( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceOpticalFlowFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceOpticalFlowFeaturesNV & operator=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceOpticalFlowFeaturesNV & operator=( VkPhysicalDeviceOpticalFlowFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV & setOpticalFlow( Bool32 opticalFlow_ ) & VULKAN_HPP_NOEXCEPT { opticalFlow = opticalFlow_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceOpticalFlowFeaturesNV && setOpticalFlow( Bool32 opticalFlow_ ) && VULKAN_HPP_NOEXCEPT { opticalFlow = opticalFlow_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceOpticalFlowFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowFeaturesNV &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, opticalFlow ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceOpticalFlowFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( opticalFlow == rhs.opticalFlow ); # endif } bool operator!=( PhysicalDeviceOpticalFlowFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceOpticalFlowFeaturesNV; void * pNext = {}; Bool32 opticalFlow = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceOpticalFlowFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceOpticalFlowFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceOpticalFlowPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceOpticalFlowPropertiesNV.html struct PhysicalDeviceOpticalFlowPropertiesNV { using NativeType = VkPhysicalDeviceOpticalFlowPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( OpticalFlowGridSizeFlagsNV supportedOutputGridSizes_ = {}, 
OpticalFlowGridSizeFlagsNV supportedHintGridSizes_ = {}, Bool32 hintSupported_ = {}, Bool32 costSupported_ = {}, Bool32 bidirectionalFlowSupported_ = {}, Bool32 globalFlowSupported_ = {}, uint32_t minWidth_ = {}, uint32_t minHeight_ = {}, uint32_t maxWidth_ = {}, uint32_t maxHeight_ = {}, uint32_t maxNumRegionsOfInterest_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , supportedOutputGridSizes{ supportedOutputGridSizes_ } , supportedHintGridSizes{ supportedHintGridSizes_ } , hintSupported{ hintSupported_ } , costSupported{ costSupported_ } , bidirectionalFlowSupported{ bidirectionalFlowSupported_ } , globalFlowSupported{ globalFlowSupported_ } , minWidth{ minWidth_ } , minHeight{ minHeight_ } , maxWidth{ maxWidth_ } , maxHeight{ maxHeight_ } , maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowPropertiesNV( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceOpticalFlowPropertiesNV( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceOpticalFlowPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceOpticalFlowPropertiesNV & operator=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceOpticalFlowPropertiesNV & operator=( VkPhysicalDeviceOpticalFlowPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceOpticalFlowPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceOpticalFlowPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, supportedOutputGridSizes, supportedHintGridSizes, hintSupported, costSupported, bidirectionalFlowSupported, globalFlowSupported, minWidth, minHeight, maxWidth, maxHeight, maxNumRegionsOfInterest ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceOpticalFlowPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedOutputGridSizes == rhs.supportedOutputGridSizes ) && ( supportedHintGridSizes == rhs.supportedHintGridSizes ) && ( hintSupported == rhs.hintSupported ) && ( costSupported == rhs.costSupported ) && ( bidirectionalFlowSupported == rhs.bidirectionalFlowSupported ) && ( globalFlowSupported == rhs.globalFlowSupported ) && ( minWidth == rhs.minWidth ) && ( minHeight == rhs.minHeight ) && ( maxWidth == rhs.maxWidth ) && ( maxHeight == rhs.maxHeight ) && ( maxNumRegionsOfInterest == rhs.maxNumRegionsOfInterest ); # endif } bool operator!=( PhysicalDeviceOpticalFlowPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceOpticalFlowPropertiesNV; void * pNext = {}; OpticalFlowGridSizeFlagsNV supportedOutputGridSizes = {}; OpticalFlowGridSizeFlagsNV supportedHintGridSizes = {}; Bool32 hintSupported = {}; Bool32 costSupported = {}; Bool32 bidirectionalFlowSupported = {}; Bool32 globalFlowSupported = {}; uint32_t minWidth = {}; uint32_t minHeight = {}; uint32_t maxWidth = {}; uint32_t maxHeight = {}; uint32_t maxNumRegionsOfInterest = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceOpticalFlowPropertiesNV; }; #endif template <> struct CppType { 
using Type = PhysicalDeviceOpticalFlowPropertiesNV; }; // wrapper struct for struct VkPhysicalDevicePCIBusInfoPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html struct PhysicalDevicePCIBusInfoPropertiesEXT { using NativeType = VkPhysicalDevicePCIBusInfoPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pciDomain{ pciDomain_ } , pciBus{ pciBus_ } , pciDevice{ pciDevice_ } , pciFunction{ pciFunction_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePCIBusInfoPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePCIBusInfoPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator 
VkPhysicalDevicePCIBusInfoPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pciDomain, pciBus, pciDevice, pciFunction ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pciDomain == rhs.pciDomain ) && ( pciBus == rhs.pciBus ) && ( pciDevice == rhs.pciDevice ) && ( pciFunction == rhs.pciFunction ); # endif } bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT; void * pNext = {}; uint32_t pciDomain = {}; uint32_t pciBus = {}; uint32_t pciDevice = {}; uint32_t pciFunction = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePCIBusInfoPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePCIBusInfoPropertiesEXT; }; // wrapper struct for struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT.html struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { using NativeType = VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( Bool32 pageableDeviceLocalMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & operator=( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT & setPageableDeviceLocalMemory( Bool32 pageableDeviceLocalMemory_ ) & VULKAN_HPP_NOEXCEPT { pageableDeviceLocalMemory = pageableDeviceLocalMemory_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT && setPageableDeviceLocalMemory( Bool32 pageableDeviceLocalMemory_ ) && VULKAN_HPP_NOEXCEPT { pageableDeviceLocalMemory = pageableDeviceLocalMemory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pageableDeviceLocalMemory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pageableDeviceLocalMemory == rhs.pageableDeviceLocalMemory ); # endif } bool operator!=( PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; void * pNext = {}; Bool32 pageableDeviceLocalMemory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV.html struct PhysicalDevicePartitionedAccelerationStructureFeaturesNV { using NativeType = 
VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( Bool32 partitionedAccelerationStructure_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , partitionedAccelerationStructure{ partitionedAccelerationStructure_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructureFeaturesNV( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePartitionedAccelerationStructureFeaturesNV( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePartitionedAccelerationStructureFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePartitionedAccelerationStructureFeaturesNV & operator=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePartitionedAccelerationStructureFeaturesNV & operator=( VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV & setPartitionedAccelerationStructure( 
Bool32 partitionedAccelerationStructure_ ) & VULKAN_HPP_NOEXCEPT { partitionedAccelerationStructure = partitionedAccelerationStructure_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePartitionedAccelerationStructureFeaturesNV && setPartitionedAccelerationStructure( Bool32 partitionedAccelerationStructure_ ) && VULKAN_HPP_NOEXCEPT { partitionedAccelerationStructure = partitionedAccelerationStructure_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructureFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, partitionedAccelerationStructure ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & ) const = default; #else bool operator==( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( partitionedAccelerationStructure == rhs.partitionedAccelerationStructure ); # endif } bool operator!=( PhysicalDevicePartitionedAccelerationStructureFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructureFeaturesNV; void * pNext = {}; Bool32 
partitionedAccelerationStructure = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePartitionedAccelerationStructureFeaturesNV; }; // wrapper struct for struct VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV.html struct PhysicalDevicePartitionedAccelerationStructurePropertiesNV { using NativeType = VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( uint32_t maxPartitionCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxPartitionCount{ maxPartitionCount_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePartitionedAccelerationStructurePropertiesNV( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePartitionedAccelerationStructurePropertiesNV( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePartitionedAccelerationStructurePropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePartitionedAccelerationStructurePropertiesNV & operator=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePartitionedAccelerationStructurePropertiesNV & operator=( VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = 
*reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePartitionedAccelerationStructurePropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxPartitionCount ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & ) const = default; #else bool operator==( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPartitionCount == rhs.maxPartitionCount ); # endif } bool operator!=( PhysicalDevicePartitionedAccelerationStructurePropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePartitionedAccelerationStructurePropertiesNV; void * pNext = {}; uint32_t maxPartitionCount = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePartitionedAccelerationStructurePropertiesNV; }; // wrapper struct for struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerStageDescriptorSetFeaturesNV.html struct 
PhysicalDevicePerStageDescriptorSetFeaturesNV { using NativeType = VkPhysicalDevicePerStageDescriptorSetFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( Bool32 perStageDescriptorSet_ = {}, Bool32 dynamicPipelineLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , perStageDescriptorSet{ perStageDescriptorSet_ } , dynamicPipelineLayout{ dynamicPipelineLayout_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerStageDescriptorSetFeaturesNV( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerStageDescriptorSetFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePerStageDescriptorSetFeaturesNV & operator=( VkPhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setPerStageDescriptorSet( Bool32 perStageDescriptorSet_ ) & VULKAN_HPP_NOEXCEPT { 
perStageDescriptorSet = perStageDescriptorSet_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV && setPerStageDescriptorSet( Bool32 perStageDescriptorSet_ ) && VULKAN_HPP_NOEXCEPT { perStageDescriptorSet = perStageDescriptorSet_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV & setDynamicPipelineLayout( Bool32 dynamicPipelineLayout_ ) & VULKAN_HPP_NOEXCEPT { dynamicPipelineLayout = dynamicPipelineLayout_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerStageDescriptorSetFeaturesNV && setDynamicPipelineLayout( Bool32 dynamicPipelineLayout_ ) && VULKAN_HPP_NOEXCEPT { dynamicPipelineLayout = dynamicPipelineLayout_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, perStageDescriptorSet, dynamicPipelineLayout ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePerStageDescriptorSetFeaturesNV const & ) const = default; #else bool operator==( PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( perStageDescriptorSet == rhs.perStageDescriptorSet ) && ( dynamicPipelineLayout == rhs.dynamicPipelineLayout ); # endif } bool operator!=( 
PhysicalDevicePerStageDescriptorSetFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV; void * pNext = {}; Bool32 perStageDescriptorSet = {}; Bool32 dynamicPipelineLayout = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePerStageDescriptorSetFeaturesNV; }; // wrapper struct for struct VkPhysicalDevicePerformanceCountersByRegionFeaturesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceCountersByRegionFeaturesARM.html struct PhysicalDevicePerformanceCountersByRegionFeaturesARM { using NativeType = VkPhysicalDevicePerformanceCountersByRegionFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionFeaturesARM( Bool32 performanceCountersByRegion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , performanceCountersByRegion{ performanceCountersByRegion_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionFeaturesARM( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceCountersByRegionFeaturesARM( VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceCountersByRegionFeaturesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePerformanceCountersByRegionFeaturesARM & operator=( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif 
/*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePerformanceCountersByRegionFeaturesARM & operator=( VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM & setPerformanceCountersByRegion( Bool32 performanceCountersByRegion_ ) & VULKAN_HPP_NOEXCEPT { performanceCountersByRegion = performanceCountersByRegion_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceCountersByRegionFeaturesARM && setPerformanceCountersByRegion( Bool32 performanceCountersByRegion_ ) && VULKAN_HPP_NOEXCEPT { performanceCountersByRegion = performanceCountersByRegion_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionFeaturesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, performanceCountersByRegion ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( 
PhysicalDevicePerformanceCountersByRegionFeaturesARM const & ) const = default; #else bool operator==( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCountersByRegion == rhs.performanceCountersByRegion ); # endif } bool operator!=( PhysicalDevicePerformanceCountersByRegionFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePerformanceCountersByRegionFeaturesARM; void * pNext = {}; Bool32 performanceCountersByRegion = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePerformanceCountersByRegionFeaturesARM; }; #endif template <> struct CppType { using Type = PhysicalDevicePerformanceCountersByRegionFeaturesARM; }; // wrapper struct for struct VkPhysicalDevicePerformanceCountersByRegionPropertiesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceCountersByRegionPropertiesARM.html struct PhysicalDevicePerformanceCountersByRegionPropertiesARM { using NativeType = VkPhysicalDevicePerformanceCountersByRegionPropertiesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionPropertiesARM( uint32_t maxPerRegionPerformanceCounters_ = {}, Extent2D performanceCounterRegionSize_ = {}, uint32_t rowStrideAlignment_ = {}, uint32_t regionAlignment_ = {}, Bool32 identityTransformOrder_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxPerRegionPerformanceCounters{ 
maxPerRegionPerformanceCounters_ } , performanceCounterRegionSize{ performanceCounterRegionSize_ } , rowStrideAlignment{ rowStrideAlignment_ } , regionAlignment{ regionAlignment_ } , identityTransformOrder{ identityTransformOrder_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceCountersByRegionPropertiesARM( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceCountersByRegionPropertiesARM( VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceCountersByRegionPropertiesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePerformanceCountersByRegionPropertiesARM & operator=( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePerformanceCountersByRegionPropertiesARM & operator=( VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceCountersByRegionPropertiesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxPerRegionPerformanceCounters, performanceCounterRegionSize, rowStrideAlignment, regionAlignment, identityTransformOrder ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & ) const = 
default; #else bool operator==( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPerRegionPerformanceCounters == rhs.maxPerRegionPerformanceCounters ) && ( performanceCounterRegionSize == rhs.performanceCounterRegionSize ) && ( rowStrideAlignment == rhs.rowStrideAlignment ) && ( regionAlignment == rhs.regionAlignment ) && ( identityTransformOrder == rhs.identityTransformOrder ); # endif } bool operator!=( PhysicalDevicePerformanceCountersByRegionPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePerformanceCountersByRegionPropertiesARM; void * pNext = {}; uint32_t maxPerRegionPerformanceCounters = {}; Extent2D performanceCounterRegionSize = {}; uint32_t rowStrideAlignment = {}; uint32_t regionAlignment = {}; Bool32 identityTransformOrder = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePerformanceCountersByRegionPropertiesARM; }; #endif template <> struct CppType { using Type = PhysicalDevicePerformanceCountersByRegionPropertiesARM; }; // wrapper struct for struct VkPhysicalDevicePerformanceQueryFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryFeaturesKHR.html struct PhysicalDevicePerformanceQueryFeaturesKHR { using NativeType = VkPhysicalDevicePerformanceQueryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( Bool32 performanceCounterQueryPools_ = {}, Bool32 
performanceCounterMultipleQueryPools_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , performanceCounterQueryPools{ performanceCounterQueryPools_ } , performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( Bool32 performanceCounterQueryPools_ ) & VULKAN_HPP_NOEXCEPT { performanceCounterQueryPools = performanceCounterQueryPools_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR && setPerformanceCounterQueryPools( Bool32 performanceCounterQueryPools_ ) && VULKAN_HPP_NOEXCEPT { performanceCounterQueryPools = performanceCounterQueryPools_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( Bool32 
performanceCounterMultipleQueryPools_ ) & VULKAN_HPP_NOEXCEPT { performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR && setPerformanceCounterMultipleQueryPools( Bool32 performanceCounterMultipleQueryPools_ ) && VULKAN_HPP_NOEXCEPT { performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePerformanceQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, performanceCounterQueryPools, performanceCounterMultipleQueryPools ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools ) && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools ); # endif } bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR; void * pNext = {}; Bool32 performanceCounterQueryPools = 
{}; Bool32 performanceCounterMultipleQueryPools = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePerformanceQueryFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePerformanceQueryFeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePerformanceQueryPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePerformanceQueryPropertiesKHR.html struct PhysicalDevicePerformanceQueryPropertiesKHR { using NativeType = VkPhysicalDevicePerformanceQueryPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( Bool32 allowCommandBufferQueryCopies_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , allowCommandBufferQueryCopies{ allowCommandBufferQueryCopies_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePerformanceQueryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePerformanceQueryPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, allowCommandBufferQueryCopies ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies ); # endif } bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR; void * pNext = {}; Bool32 allowCommandBufferQueryCopies = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePerformanceQueryPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePerformanceQueryPropertiesKHR; }; // wrapper struct for struct VkPhysicalDevicePipelineBinaryFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryFeaturesKHR.html struct PhysicalDevicePipelineBinaryFeaturesKHR { using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; #if !defined( 
VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( Bool32 pipelineBinaries_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineBinaries{ pipelineBinaries_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( Bool32 pipelineBinaries_ ) & VULKAN_HPP_NOEXCEPT { pipelineBinaries = pipelineBinaries_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR && setPipelineBinaries( Bool32 pipelineBinaries_ ) && VULKAN_HPP_NOEXCEPT { pipelineBinaries = pipelineBinaries_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineBinaries ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaries == rhs.pipelineBinaries ); # endif } bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; void * pNext = {}; Bool32 pipelineBinaries = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineBinaryFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineBinaryFeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePipelineBinaryPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineBinaryPropertiesKHR.html struct PhysicalDevicePipelineBinaryPropertiesKHR { using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( 
Bool32 pipelineBinaryInternalCache_ = {}, Bool32 pipelineBinaryInternalCacheControl_ = {}, Bool32 pipelineBinaryPrefersInternalCache_ = {}, Bool32 pipelineBinaryPrecompiledInternalCache_ = {}, Bool32 pipelineBinaryCompressedData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ } , pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ } , pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ } , pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ } , pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineBinaryPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
pipelineBinaryInternalCache, pipelineBinaryInternalCacheControl, pipelineBinaryPrefersInternalCache, pipelineBinaryPrecompiledInternalCache, pipelineBinaryCompressedData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache ) && ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl ) && ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache ) && ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache ) && ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData ); # endif } bool operator!=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; void * pNext = {}; Bool32 pipelineBinaryInternalCache = {}; Bool32 pipelineBinaryInternalCacheControl = {}; Bool32 pipelineBinaryPrefersInternalCache = {}; Bool32 pipelineBinaryPrecompiledInternalCache = {}; Bool32 pipelineBinaryCompressedData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineBinaryPropertiesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineBinaryPropertiesKHR; }; // wrapper struct for struct VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC.html struct PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC { using NativeType = 
VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( Bool32 pipelineCacheIncrementalMode_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineCacheIncrementalMode{ pipelineCacheIncrementalMode_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & operator=( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & operator=( VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC & setPipelineCacheIncrementalMode( Bool32 pipelineCacheIncrementalMode_ ) & VULKAN_HPP_NOEXCEPT { 
pipelineCacheIncrementalMode = pipelineCacheIncrementalMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC && setPipelineCacheIncrementalMode( Bool32 pipelineCacheIncrementalMode_ ) && VULKAN_HPP_NOEXCEPT { pipelineCacheIncrementalMode = pipelineCacheIncrementalMode_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCacheIncrementalModeFeaturesSEC *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineCacheIncrementalMode ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & ) const = default; #else bool operator==( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCacheIncrementalMode == rhs.pipelineCacheIncrementalMode ); # endif } bool operator!=( PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineCacheIncrementalModeFeaturesSEC; void * pNext = {}; Bool32 pipelineCacheIncrementalMode = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineCacheIncrementalModeFeaturesSEC; }; // wrapper struct for struct VkPhysicalDevicePipelineCreationCacheControlFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineCreationCacheControlFeatures.html struct PhysicalDevicePipelineCreationCacheControlFeatures { using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( Bool32 pipelineCreationCacheControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineCreationCacheControlFeatures( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineCreationCacheControlFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineCreationCacheControlFeatures & operator=( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineCreationCacheControlFeatures & operator=( VkPhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPNext( void * pNext_ 
) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures & setPipelineCreationCacheControl( Bool32 pipelineCreationCacheControl_ ) & VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeatures && setPipelineCreationCacheControl( Bool32 pipelineCreationCacheControl_ ) && VULKAN_HPP_NOEXCEPT { pipelineCreationCacheControl = pipelineCreationCacheControl_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineCreationCacheControlFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCreationCacheControlFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCreationCacheControlFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineCreationCacheControlFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineCreationCacheControl ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeatures const & ) const = default; #else bool operator==( PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl ); # endif } bool operator!=( 
PhysicalDevicePipelineCreationCacheControlFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeatures; void * pNext = {}; Bool32 pipelineCreationCacheControl = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineCreationCacheControlFeatures; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineCreationCacheControlFeatures; }; using PhysicalDevicePipelineCreationCacheControlFeaturesEXT = PhysicalDevicePipelineCreationCacheControlFeatures; // wrapper struct for struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.html struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR { using NativeType = VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( Bool32 pipelineExecutableInfo_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineExecutableInfo{ pipelineExecutableInfo_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( 
PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( Bool32 pipelineExecutableInfo_ ) & VULKAN_HPP_NOEXCEPT { pipelineExecutableInfo = pipelineExecutableInfo_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR && setPipelineExecutableInfo( Bool32 pipelineExecutableInfo_ ) && VULKAN_HPP_NOEXCEPT { pipelineExecutableInfo = pipelineExecutableInfo_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineExecutableInfo ); } #endif #if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo ); # endif } bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR; void * pNext = {}; Bool32 pipelineExecutableInfo = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT.html struct PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { using NativeType = VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( Bool32 pipelineLibraryGroupHandles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( 
PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & operator=( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT & setPipelineLibraryGroupHandles( Bool32 pipelineLibraryGroupHandles_ ) & VULKAN_HPP_NOEXCEPT { pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT && setPipelineLibraryGroupHandles( Bool32 pipelineLibraryGroupHandles_ ) && VULKAN_HPP_NOEXCEPT { pipelineLibraryGroupHandles = pipelineLibraryGroupHandles_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineLibraryGroupHandles ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineLibraryGroupHandles == rhs.pipelineLibraryGroupHandles ); # endif } bool operator!=( PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; void * pNext = {}; Bool32 pipelineLibraryGroupHandles = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePipelineOpacityMicromapFeaturesARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineOpacityMicromapFeaturesARM.html struct PhysicalDevicePipelineOpacityMicromapFeaturesARM { using NativeType = VkPhysicalDevicePipelineOpacityMicromapFeaturesARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) 
&& !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( Bool32 pipelineOpacityMicromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineOpacityMicromap{ pipelineOpacityMicromap_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineOpacityMicromapFeaturesARM( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineOpacityMicromapFeaturesARM( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineOpacityMicromapFeaturesARM( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineOpacityMicromapFeaturesARM & operator=( VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM & setPipelineOpacityMicromap( Bool32 pipelineOpacityMicromap_ ) & VULKAN_HPP_NOEXCEPT { pipelineOpacityMicromap = pipelineOpacityMicromap_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineOpacityMicromapFeaturesARM && setPipelineOpacityMicromap( Bool32 pipelineOpacityMicromap_ ) && VULKAN_HPP_NOEXCEPT { pipelineOpacityMicromap = pipelineOpacityMicromap_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineOpacityMicromapFeaturesARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineOpacityMicromap ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & ) const = default; #else bool operator==( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineOpacityMicromap == rhs.pipelineOpacityMicromap ); # endif } bool operator!=( PhysicalDevicePipelineOpacityMicromapFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineOpacityMicromapFeaturesARM; void * pNext = {}; Bool32 pipelineOpacityMicromap = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineOpacityMicromapFeaturesARM; }; // wrapper struct for struct VkPhysicalDevicePipelinePropertiesFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelinePropertiesFeaturesEXT.html struct PhysicalDevicePipelinePropertiesFeaturesEXT { using NativeType = VkPhysicalDevicePipelinePropertiesFeaturesEXT; static const bool 
allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( Bool32 pipelinePropertiesIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelinePropertiesFeaturesEXT( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelinePropertiesFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelinePropertiesFeaturesEXT & operator=( VkPhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT & setPipelinePropertiesIdentifier( Bool32 pipelinePropertiesIdentifier_ ) & VULKAN_HPP_NOEXCEPT { pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelinePropertiesFeaturesEXT && setPipelinePropertiesIdentifier( Bool32 
pipelinePropertiesIdentifier_ ) && VULKAN_HPP_NOEXCEPT { pipelinePropertiesIdentifier = pipelinePropertiesIdentifier_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelinePropertiesFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelinePropertiesFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelinePropertiesFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelinePropertiesIdentifier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelinePropertiesFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelinePropertiesIdentifier == rhs.pipelinePropertiesIdentifier ); # endif } bool operator!=( PhysicalDevicePipelinePropertiesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelinePropertiesFeaturesEXT; void * pNext = {}; Bool32 pipelinePropertiesIdentifier = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePipelineProtectedAccessFeatures, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineProtectedAccessFeatures.html struct PhysicalDevicePipelineProtectedAccessFeatures { using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( Bool32 pipelineProtectedAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineProtectedAccess{ pipelineProtectedAccess_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPipelineProtectedAccess( Bool32 
pipelineProtectedAccess_ ) & VULKAN_HPP_NOEXCEPT { pipelineProtectedAccess = pipelineProtectedAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures && setPipelineProtectedAccess( Bool32 pipelineProtectedAccess_ ) && VULKAN_HPP_NOEXCEPT { pipelineProtectedAccess = pipelineProtectedAccess_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineProtectedAccessFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineProtectedAccessFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineProtectedAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default; #else bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ); # endif } bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; void * pNext = {}; Bool32 pipelineProtectedAccess = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineProtectedAccessFeatures; }; #endif template <> struct CppType { using Type = 
PhysicalDevicePipelineProtectedAccessFeatures; }; using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures; // wrapper struct for struct VkPhysicalDevicePipelineRobustnessFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessFeatures.html struct PhysicalDevicePipelineRobustnessFeatures { using NativeType = VkPhysicalDevicePipelineRobustnessFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( Bool32 pipelineRobustness_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineRobustness{ pipelineRobustness_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return 
std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPipelineRobustness( Bool32 pipelineRobustness_ ) & VULKAN_HPP_NOEXCEPT { pipelineRobustness = pipelineRobustness_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures && setPipelineRobustness( Bool32 pipelineRobustness_ ) && VULKAN_HPP_NOEXCEPT { pipelineRobustness = pipelineRobustness_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pipelineRobustness ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default; #else bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineRobustness == rhs.pipelineRobustness ); # endif } bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; void * pNext = {}; Bool32 pipelineRobustness = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineRobustnessFeatures; }; #endif template <> struct CppType { using Type = 
PhysicalDevicePipelineRobustnessFeatures; }; using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; // wrapper struct for struct VkPhysicalDevicePipelineRobustnessProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePipelineRobustnessProperties.html struct PhysicalDevicePipelineRobustnessProperties { using NativeType = VkPhysicalDevicePipelineRobustnessProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault, PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = PipelineRobustnessBufferBehavior::eDeviceDefault, PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = PipelineRobustnessBufferBehavior::eDeviceDefault, PipelineRobustnessImageBehavior defaultRobustnessImages_ = PipelineRobustnessImageBehavior::eDeviceDefault, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } , defaultRobustnessImages{ defaultRobustnessImages_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePipelineRobustnessProperties & operator=( 
PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePipelineRobustnessProperties & operator=( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePipelineRobustnessProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, defaultRobustnessStorageBuffers, defaultRobustnessUniformBuffers, defaultRobustnessVertexInputs, defaultRobustnessImages ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default; #else bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ); # endif } bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessProperties; void * pNext = {}; 
PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault; PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = PipelineRobustnessBufferBehavior::eDeviceDefault; PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = PipelineRobustnessBufferBehavior::eDeviceDefault; PipelineRobustnessImageBehavior defaultRobustnessImages = PipelineRobustnessImageBehavior::eDeviceDefault; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePipelineRobustnessProperties; }; #endif template <> struct CppType { using Type = PhysicalDevicePipelineRobustnessProperties; }; using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; // wrapper struct for struct VkPhysicalDevicePointClippingProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePointClippingProperties.html struct PhysicalDevicePointClippingProperties { using NativeType = VkPhysicalDevicePointClippingProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PointClippingBehavior pointClippingBehavior_ = PointClippingBehavior::eAllClipPlanes, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pointClippingBehavior{ pointClippingBehavior_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePointClippingProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePointClippingProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePointClippingProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePointClippingProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pointClippingBehavior ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePointClippingProperties const & ) const = default; #else bool operator==( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pointClippingBehavior == rhs.pointClippingBehavior ); # endif } bool operator!=( PhysicalDevicePointClippingProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePointClippingProperties; void * pNext = {}; PointClippingBehavior pointClippingBehavior = PointClippingBehavior::eAllClipPlanes; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePointClippingProperties; }; #endif template <> struct CppType { using Type = PhysicalDevicePointClippingProperties; }; using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper 
struct for struct VkPhysicalDevicePortabilitySubsetFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetFeaturesKHR.html struct PhysicalDevicePortabilitySubsetFeaturesKHR { using NativeType = VkPhysicalDevicePortabilitySubsetFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( Bool32 constantAlphaColorBlendFactors_ = {}, Bool32 events_ = {}, Bool32 imageViewFormatReinterpretation_ = {}, Bool32 imageViewFormatSwizzle_ = {}, Bool32 imageView2DOn3DImage_ = {}, Bool32 multisampleArrayImage_ = {}, Bool32 mutableComparisonSamplers_ = {}, Bool32 pointPolygons_ = {}, Bool32 samplerMipLodBias_ = {}, Bool32 separateStencilMaskRef_ = {}, Bool32 shaderSampleRateInterpolationFunctions_ = {}, Bool32 tessellationIsolines_ = {}, Bool32 tessellationPointMode_ = {}, Bool32 triangleFans_ = {}, Bool32 vertexAttributeAccessBeyondStride_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ } , events{ events_ } , imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ } , imageViewFormatSwizzle{ imageViewFormatSwizzle_ } , imageView2DOn3DImage{ imageView2DOn3DImage_ } , multisampleArrayImage{ multisampleArrayImage_ } , mutableComparisonSamplers{ mutableComparisonSamplers_ } , pointPolygons{ pointPolygons_ } , samplerMipLodBias{ samplerMipLodBias_ } , separateStencilMaskRef{ separateStencilMaskRef_ } , shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ } , tessellationIsolines{ tessellationIsolines_ } , tessellationPointMode{ tessellationPointMode_ } , triangleFans{ triangleFans_ } , vertexAttributeAccessBeyondStride{ 
vertexAttributeAccessBeyondStride_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( Bool32 constantAlphaColorBlendFactors_ ) & VULKAN_HPP_NOEXCEPT { constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setConstantAlphaColorBlendFactors( Bool32 constantAlphaColorBlendFactors_ ) && VULKAN_HPP_NOEXCEPT { constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( Bool32 events_ ) & VULKAN_HPP_NOEXCEPT { events = events_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setEvents( Bool32 events_ ) && VULKAN_HPP_NOEXCEPT { events = events_; return std::move( 
*this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( Bool32 imageViewFormatReinterpretation_ ) & VULKAN_HPP_NOEXCEPT { imageViewFormatReinterpretation = imageViewFormatReinterpretation_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setImageViewFormatReinterpretation( Bool32 imageViewFormatReinterpretation_ ) && VULKAN_HPP_NOEXCEPT { imageViewFormatReinterpretation = imageViewFormatReinterpretation_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( Bool32 imageViewFormatSwizzle_ ) & VULKAN_HPP_NOEXCEPT { imageViewFormatSwizzle = imageViewFormatSwizzle_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setImageViewFormatSwizzle( Bool32 imageViewFormatSwizzle_ ) && VULKAN_HPP_NOEXCEPT { imageViewFormatSwizzle = imageViewFormatSwizzle_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( Bool32 imageView2DOn3DImage_ ) & VULKAN_HPP_NOEXCEPT { imageView2DOn3DImage = imageView2DOn3DImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setImageView2DOn3DImage( Bool32 imageView2DOn3DImage_ ) && VULKAN_HPP_NOEXCEPT { imageView2DOn3DImage = imageView2DOn3DImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( Bool32 multisampleArrayImage_ ) & VULKAN_HPP_NOEXCEPT { multisampleArrayImage = multisampleArrayImage_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setMultisampleArrayImage( Bool32 multisampleArrayImage_ ) && VULKAN_HPP_NOEXCEPT { multisampleArrayImage = multisampleArrayImage_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( Bool32 
mutableComparisonSamplers_ ) & VULKAN_HPP_NOEXCEPT { mutableComparisonSamplers = mutableComparisonSamplers_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setMutableComparisonSamplers( Bool32 mutableComparisonSamplers_ ) && VULKAN_HPP_NOEXCEPT { mutableComparisonSamplers = mutableComparisonSamplers_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( Bool32 pointPolygons_ ) & VULKAN_HPP_NOEXCEPT { pointPolygons = pointPolygons_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setPointPolygons( Bool32 pointPolygons_ ) && VULKAN_HPP_NOEXCEPT { pointPolygons = pointPolygons_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( Bool32 samplerMipLodBias_ ) & VULKAN_HPP_NOEXCEPT { samplerMipLodBias = samplerMipLodBias_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setSamplerMipLodBias( Bool32 samplerMipLodBias_ ) && VULKAN_HPP_NOEXCEPT { samplerMipLodBias = samplerMipLodBias_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( Bool32 separateStencilMaskRef_ ) & VULKAN_HPP_NOEXCEPT { separateStencilMaskRef = separateStencilMaskRef_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setSeparateStencilMaskRef( Bool32 separateStencilMaskRef_ ) && VULKAN_HPP_NOEXCEPT { separateStencilMaskRef = separateStencilMaskRef_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( Bool32 shaderSampleRateInterpolationFunctions_ ) & VULKAN_HPP_NOEXCEPT { shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && 
setShaderSampleRateInterpolationFunctions( Bool32 shaderSampleRateInterpolationFunctions_ ) && VULKAN_HPP_NOEXCEPT { shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( Bool32 tessellationIsolines_ ) & VULKAN_HPP_NOEXCEPT { tessellationIsolines = tessellationIsolines_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setTessellationIsolines( Bool32 tessellationIsolines_ ) && VULKAN_HPP_NOEXCEPT { tessellationIsolines = tessellationIsolines_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( Bool32 tessellationPointMode_ ) & VULKAN_HPP_NOEXCEPT { tessellationPointMode = tessellationPointMode_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setTessellationPointMode( Bool32 tessellationPointMode_ ) && VULKAN_HPP_NOEXCEPT { tessellationPointMode = tessellationPointMode_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( Bool32 triangleFans_ ) & VULKAN_HPP_NOEXCEPT { triangleFans = triangleFans_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setTriangleFans( Bool32 triangleFans_ ) && VULKAN_HPP_NOEXCEPT { triangleFans = triangleFans_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( Bool32 vertexAttributeAccessBeyondStride_ ) & VULKAN_HPP_NOEXCEPT { vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR && setVertexAttributeAccessBeyondStride( Bool32 vertexAttributeAccessBeyondStride_ ) && VULKAN_HPP_NOEXCEPT { vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_; 
return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, constantAlphaColorBlendFactors, events, imageViewFormatReinterpretation, imageViewFormatSwizzle, imageView2DOn3DImage, multisampleArrayImage, mutableComparisonSamplers, pointPolygons, samplerMipLodBias, separateStencilMaskRef, shaderSampleRateInterpolationFunctions, tessellationIsolines, tessellationPointMode, triangleFans, vertexAttributeAccessBeyondStride ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const & ) const = default; # else bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors ) && ( events == rhs.events ) && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation ) && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle ) && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage ) && ( multisampleArrayImage == rhs.multisampleArrayImage ) && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers ) && ( pointPolygons == rhs.pointPolygons ) && ( samplerMipLodBias == rhs.samplerMipLodBias ) && ( separateStencilMaskRef == 
rhs.separateStencilMaskRef ) && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions ) && ( tessellationIsolines == rhs.tessellationIsolines ) && ( tessellationPointMode == rhs.tessellationPointMode ) && ( triangleFans == rhs.triangleFans ) && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride ); # endif } bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR; void * pNext = {}; Bool32 constantAlphaColorBlendFactors = {}; Bool32 events = {}; Bool32 imageViewFormatReinterpretation = {}; Bool32 imageViewFormatSwizzle = {}; Bool32 imageView2DOn3DImage = {}; Bool32 multisampleArrayImage = {}; Bool32 mutableComparisonSamplers = {}; Bool32 pointPolygons = {}; Bool32 samplerMipLodBias = {}; Bool32 separateStencilMaskRef = {}; Bool32 shaderSampleRateInterpolationFunctions = {}; Bool32 tessellationIsolines = {}; Bool32 tessellationPointMode = {}; Bool32 triangleFans = {}; Bool32 vertexAttributeAccessBeyondStride = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; }; # endif template <> struct CppType { using Type = PhysicalDevicePortabilitySubsetFeaturesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDevicePortabilitySubsetPropertiesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePortabilitySubsetPropertiesKHR.html struct PhysicalDevicePortabilitySubsetPropertiesKHR { using NativeType = VkPhysicalDevicePortabilitySubsetPropertiesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePortabilitySubsetPropertiesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, minVertexInputBindingStrideAlignment ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const & ) const = default; # else bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment ); # endif } bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR; void * pNext = {}; uint32_t minVertexInputBindingStrideAlignment = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; }; # endif template <> struct CppType { using Type = PhysicalDevicePortabilitySubsetPropertiesKHR; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkPhysicalDevicePresentBarrierFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentBarrierFeaturesNV.html struct PhysicalDevicePresentBarrierFeaturesNV { using NativeType = VkPhysicalDevicePresentBarrierFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( Bool32 presentBarrier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentBarrier{ presentBarrier_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentBarrierFeaturesNV & operator=( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPresentBarrier( Bool32 presentBarrier_ ) & VULKAN_HPP_NOEXCEPT { presentBarrier = presentBarrier_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV && setPresentBarrier( Bool32 presentBarrier_ ) && VULKAN_HPP_NOEXCEPT { presentBarrier = presentBarrier_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentBarrierFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentBarrierFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentBarrier ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default; #else bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType 
) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier ); # endif } bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; void * pNext = {}; Bool32 presentBarrier = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentBarrierFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentBarrierFeaturesNV; }; // wrapper struct for struct VkPhysicalDevicePresentId2FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentId2FeaturesKHR.html struct PhysicalDevicePresentId2FeaturesKHR { using NativeType = VkPhysicalDevicePresentId2FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentId2FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR( Bool32 presentId2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentId2{ presentId2_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentId2FeaturesKHR( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentId2FeaturesKHR( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentId2FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentId2FeaturesKHR & operator=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentId2FeaturesKHR & operator=( VkPhysicalDevicePresentId2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR & setPresentId2( Bool32 presentId2_ ) & VULKAN_HPP_NOEXCEPT { presentId2 = presentId2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentId2FeaturesKHR && setPresentId2( Bool32 presentId2_ ) && VULKAN_HPP_NOEXCEPT { presentId2 = presentId2_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentId2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentId2FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentId2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentId2FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentId2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentId2FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId2 == rhs.presentId2 ); # endif } bool operator!=( PhysicalDevicePresentId2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentId2FeaturesKHR; void * pNext = {}; Bool32 presentId2 = {}; }; #if 20 <= 
VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentId2FeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentId2FeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePresentIdFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentIdFeaturesKHR.html struct PhysicalDevicePresentIdFeaturesKHR { using NativeType = VkPhysicalDevicePresentIdFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentId{ presentId_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentIdFeaturesKHR & operator=( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & 
setPresentId( Bool32 presentId_ ) & VULKAN_HPP_NOEXCEPT { presentId = presentId_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR && setPresentId( Bool32 presentId_ ) && VULKAN_HPP_NOEXCEPT { presentId = presentId_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentIdFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentIdFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentIdFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentId ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); # endif } bool operator!=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; void * pNext = {}; Bool32 presentId = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentIdFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentIdFeaturesKHR; }; #if defined( VK_ENABLE_BETA_EXTENSIONS ) // wrapper struct for struct VkPhysicalDevicePresentMeteringFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentMeteringFeaturesNV.html 
struct PhysicalDevicePresentMeteringFeaturesNV { using NativeType = VkPhysicalDevicePresentMeteringFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV; # if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( Bool32 presentMetering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentMetering{ presentMetering_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentMeteringFeaturesNV( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentMeteringFeaturesNV( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentMeteringFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentMeteringFeaturesNV & operator=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; # endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentMeteringFeaturesNV & operator=( VkPhysicalDevicePresentMeteringFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } # if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV & setPresentMetering( Bool32 presentMetering_ ) & VULKAN_HPP_NOEXCEPT { presentMetering = presentMetering_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentMeteringFeaturesNV && setPresentMetering( Bool32 presentMetering_ ) && VULKAN_HPP_NOEXCEPT { 
presentMetering = presentMetering_; return std::move( *this ); } # endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentMeteringFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentMeteringFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentMeteringFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentMeteringFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } # if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentMetering ); } # endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentMeteringFeaturesNV const & ) const = default; # else bool operator==( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentMetering == rhs.presentMetering ); # endif } bool operator!=( PhysicalDevicePresentMeteringFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } # endif public: StructureType sType = StructureType::ePhysicalDevicePresentMeteringFeaturesNV; void * pNext = {}; Bool32 presentMetering = {}; }; # if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentMeteringFeaturesNV; }; # endif template <> struct CppType { using Type = PhysicalDevicePresentMeteringFeaturesNV; }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ // wrapper struct for struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR.html struct PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR { using NativeType = 
VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( Bool32 presentModeFifoLatestReady_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentModeFifoLatestReady{ presentModeFifoLatestReady_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR & setPresentModeFifoLatestReady( Bool32 presentModeFifoLatestReady_ ) & VULKAN_HPP_NOEXCEPT { presentModeFifoLatestReady = 
presentModeFifoLatestReady_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR && setPresentModeFifoLatestReady( Bool32 presentModeFifoLatestReady_ ) && VULKAN_HPP_NOEXCEPT { presentModeFifoLatestReady = presentModeFifoLatestReady_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentModeFifoLatestReady ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady ); # endif } bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; void * pNext = {}; Bool32 presentModeFifoLatestReady = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; }; #endif template <> struct CppType { using Type = 
PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; }; using PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = PhysicalDevicePresentModeFifoLatestReadyFeaturesKHR; // wrapper struct for struct VkPhysicalDevicePresentTimingFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentTimingFeaturesEXT.html struct PhysicalDevicePresentTimingFeaturesEXT { using NativeType = VkPhysicalDevicePresentTimingFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentTimingFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentTimingFeaturesEXT( Bool32 presentTiming_ = {}, Bool32 presentAtAbsoluteTime_ = {}, Bool32 presentAtRelativeTime_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentTiming{ presentTiming_ } , presentAtAbsoluteTime{ presentAtAbsoluteTime_ } , presentAtRelativeTime{ presentAtRelativeTime_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentTimingFeaturesEXT( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentTimingFeaturesEXT( VkPhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentTimingFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentTimingFeaturesEXT & operator=( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentTimingFeaturesEXT & operator=( VkPhysicalDevicePresentTimingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } 
VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentTiming( Bool32 presentTiming_ ) & VULKAN_HPP_NOEXCEPT { presentTiming = presentTiming_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT && setPresentTiming( Bool32 presentTiming_ ) && VULKAN_HPP_NOEXCEPT { presentTiming = presentTiming_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentAtAbsoluteTime( Bool32 presentAtAbsoluteTime_ ) & VULKAN_HPP_NOEXCEPT { presentAtAbsoluteTime = presentAtAbsoluteTime_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT && setPresentAtAbsoluteTime( Bool32 presentAtAbsoluteTime_ ) && VULKAN_HPP_NOEXCEPT { presentAtAbsoluteTime = presentAtAbsoluteTime_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT & setPresentAtRelativeTime( Bool32 presentAtRelativeTime_ ) & VULKAN_HPP_NOEXCEPT { presentAtRelativeTime = presentAtRelativeTime_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentTimingFeaturesEXT && setPresentAtRelativeTime( Bool32 presentAtRelativeTime_ ) && VULKAN_HPP_NOEXCEPT { presentAtRelativeTime = presentAtRelativeTime_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentTimingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentTimingFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentTimingFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentTimingFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { 
return std::tie( sType, pNext, presentTiming, presentAtAbsoluteTime, presentAtRelativeTime ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentTimingFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentTiming == rhs.presentTiming ) && ( presentAtAbsoluteTime == rhs.presentAtAbsoluteTime ) && ( presentAtRelativeTime == rhs.presentAtRelativeTime ); # endif } bool operator!=( PhysicalDevicePresentTimingFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentTimingFeaturesEXT; void * pNext = {}; Bool32 presentTiming = {}; Bool32 presentAtAbsoluteTime = {}; Bool32 presentAtRelativeTime = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentTimingFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentTimingFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePresentWait2FeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWait2FeaturesKHR.html struct PhysicalDevicePresentWait2FeaturesKHR { using NativeType = VkPhysicalDevicePresentWait2FeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWait2FeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR( Bool32 presentWait2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentWait2{ presentWait2_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWait2FeaturesKHR( 
PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentWait2FeaturesKHR( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentWait2FeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePresentWait2FeaturesKHR & operator=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentWait2FeaturesKHR & operator=( VkPhysicalDevicePresentWait2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR & setPresentWait2( Bool32 presentWait2_ ) & VULKAN_HPP_NOEXCEPT { presentWait2 = presentWait2_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWait2FeaturesKHR && setPresentWait2( Bool32 presentWait2_ ) && VULKAN_HPP_NOEXCEPT { presentWait2 = presentWait2_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentWait2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentWait2FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentWait2FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentWait2FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, 
presentWait2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentWait2FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait2 == rhs.presentWait2 ); # endif } bool operator!=( PhysicalDevicePresentWait2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentWait2FeaturesKHR; void * pNext = {}; Bool32 presentWait2 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentWait2FeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentWait2FeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePresentWaitFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePresentWaitFeaturesKHR.html struct PhysicalDevicePresentWaitFeaturesKHR { using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , presentWait{ presentWait_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePresentWaitFeaturesKHR( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentWaitFeaturesKHR( *reinterpret_cast( &rhs ) ) { } 
PhysicalDevicePresentWaitFeaturesKHR & operator=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePresentWaitFeaturesKHR & operator=( VkPhysicalDevicePresentWaitFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR & setPresentWait( Bool32 presentWait_ ) & VULKAN_HPP_NOEXCEPT { presentWait = presentWait_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentWaitFeaturesKHR && setPresentWait( Bool32 presentWait_ ) && VULKAN_HPP_NOEXCEPT { presentWait = presentWait_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePresentWaitFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentWaitFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePresentWaitFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePresentWaitFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentWait ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentWaitFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) 
return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentWait == rhs.presentWait ); # endif } bool operator!=( PhysicalDevicePresentWaitFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePresentWaitFeaturesKHR; void * pNext = {}; Bool32 presentWait = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePresentWaitFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDevicePresentWaitFeaturesKHR; }; // wrapper struct for struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT.html struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { using NativeType = VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( Bool32 primitiveTopologyListRestart_ = {}, Bool32 primitiveTopologyPatchListRestart_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , primitiveTopologyListRestart{ primitiveTopologyListRestart_ } , primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( 
*reinterpret_cast( &rhs ) ) { } PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & operator=( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyListRestart( Bool32 primitiveTopologyListRestart_ ) & VULKAN_HPP_NOEXCEPT { primitiveTopologyListRestart = primitiveTopologyListRestart_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT && setPrimitiveTopologyListRestart( Bool32 primitiveTopologyListRestart_ ) && VULKAN_HPP_NOEXCEPT { primitiveTopologyListRestart = primitiveTopologyListRestart_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT & setPrimitiveTopologyPatchListRestart( Bool32 primitiveTopologyPatchListRestart_ ) & VULKAN_HPP_NOEXCEPT { primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT && setPrimitiveTopologyPatchListRestart( Bool32 primitiveTopologyPatchListRestart_ ) && VULKAN_HPP_NOEXCEPT { primitiveTopologyPatchListRestart = primitiveTopologyPatchListRestart_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator 
VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, primitiveTopologyListRestart, primitiveTopologyPatchListRestart ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitiveTopologyListRestart == rhs.primitiveTopologyListRestart ) && ( primitiveTopologyPatchListRestart == rhs.primitiveTopologyPatchListRestart ); # endif } bool operator!=( PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; void * pNext = {}; Bool32 primitiveTopologyListRestart = {}; Bool32 primitiveTopologyPatchListRestart = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT.html struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { using NativeType = VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( Bool32 primitivesGeneratedQuery_ = {}, Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , primitivesGeneratedQuery{ primitivesGeneratedQuery_ } , primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ } , primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & operator=( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT 
& setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQuery( Bool32 primitivesGeneratedQuery_ ) & VULKAN_HPP_NOEXCEPT { primitivesGeneratedQuery = primitivesGeneratedQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT && setPrimitivesGeneratedQuery( Bool32 primitivesGeneratedQuery_ ) && VULKAN_HPP_NOEXCEPT { primitivesGeneratedQuery = primitivesGeneratedQuery_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithRasterizerDiscard( Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) & VULKAN_HPP_NOEXCEPT { primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT && setPrimitivesGeneratedQueryWithRasterizerDiscard( Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ ) && VULKAN_HPP_NOEXCEPT { primitivesGeneratedQueryWithRasterizerDiscard = primitivesGeneratedQueryWithRasterizerDiscard_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT & setPrimitivesGeneratedQueryWithNonZeroStreams( Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) & VULKAN_HPP_NOEXCEPT { primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT && setPrimitivesGeneratedQueryWithNonZeroStreams( Bool32 primitivesGeneratedQueryWithNonZeroStreams_ ) && VULKAN_HPP_NOEXCEPT { primitivesGeneratedQueryWithNonZeroStreams = primitivesGeneratedQueryWithNonZeroStreams_; return 
std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, primitivesGeneratedQuery, primitivesGeneratedQueryWithRasterizerDiscard, primitivesGeneratedQueryWithNonZeroStreams ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( primitivesGeneratedQuery == rhs.primitivesGeneratedQuery ) && ( primitivesGeneratedQueryWithRasterizerDiscard == rhs.primitivesGeneratedQueryWithRasterizerDiscard ) && ( primitivesGeneratedQueryWithNonZeroStreams == rhs.primitivesGeneratedQueryWithNonZeroStreams ); # endif } bool operator!=( PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; void * pNext = {}; Bool32 primitivesGeneratedQuery = {}; Bool32 primitivesGeneratedQueryWithRasterizerDiscard = {}; Bool32 primitivesGeneratedQueryWithNonZeroStreams = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; }; // wrapper struct for struct VkPhysicalDevicePrivateDataFeatures, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePrivateDataFeatures.html struct PhysicalDevicePrivateDataFeatures { using NativeType = VkPhysicalDevicePrivateDataFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , privateData{ privateData_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePrivateDataFeatures( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePrivateDataFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePrivateDataFeatures & operator=( PhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePrivateDataFeatures & operator=( VkPhysicalDevicePrivateDataFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures & setPrivateData( Bool32 privateData_ ) & 
VULKAN_HPP_NOEXCEPT { privateData = privateData_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeatures && setPrivateData( Bool32 privateData_ ) && VULKAN_HPP_NOEXCEPT { privateData = privateData_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePrivateDataFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrivateDataFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePrivateDataFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePrivateDataFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, privateData ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePrivateDataFeatures const & ) const = default; #else bool operator==( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( privateData == rhs.privateData ); # endif } bool operator!=( PhysicalDevicePrivateDataFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePrivateDataFeatures; void * pNext = {}; Bool32 privateData = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePrivateDataFeatures; }; #endif template <> struct CppType { using Type = PhysicalDevicePrivateDataFeatures; }; using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; // wrapper struct for struct VkPhysicalDeviceProtectedMemoryFeatures, see // 
https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html struct PhysicalDeviceProtectedMemoryFeatures { using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( Bool32 protectedMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , protectedMemory{ protectedMemory_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( Bool32 protectedMemory_ ) & VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures && 
setProtectedMemory( Bool32 protectedMemory_ ) && VULKAN_HPP_NOEXCEPT { protectedMemory = protectedMemory_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceProtectedMemoryFeatures const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryFeatures const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryFeatures *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, protectedMemory ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const & ) const = default; #else bool operator==( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedMemory == rhs.protectedMemory ); # endif } bool operator!=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures; void * pNext = {}; Bool32 protectedMemory = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProtectedMemoryFeatures; }; #endif template <> struct CppType { using Type = PhysicalDeviceProtectedMemoryFeatures; }; // wrapper struct for struct VkPhysicalDeviceProtectedMemoryProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProtectedMemoryProperties.html struct PhysicalDeviceProtectedMemoryProperties { using NativeType = VkPhysicalDeviceProtectedMemoryProperties; static 
const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( Bool32 protectedNoFault_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , protectedNoFault{ protectedNoFault_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceProtectedMemoryProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProtectedMemoryProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, protectedNoFault ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceProtectedMemoryProperties const & ) const = default; #else bool operator==( PhysicalDeviceProtectedMemoryProperties const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( protectedNoFault == rhs.protectedNoFault ); # endif } bool operator!=( PhysicalDeviceProtectedMemoryProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties; void * pNext = {}; Bool32 protectedNoFault = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProtectedMemoryProperties; }; #endif template <> struct CppType { using Type = PhysicalDeviceProtectedMemoryProperties; }; // wrapper struct for struct VkPhysicalDeviceProvokingVertexFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexFeaturesEXT.html struct PhysicalDeviceProvokingVertexFeaturesEXT { using NativeType = VkPhysicalDeviceProvokingVertexFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( Bool32 provokingVertexLast_ = {}, Bool32 transformFeedbackPreservesProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , provokingVertexLast{ provokingVertexLast_ } , transformFeedbackPreservesProvokingVertex{ transformFeedbackPreservesProvokingVertex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexFeaturesEXT( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProvokingVertexFeaturesEXT( *reinterpret_cast( &rhs ) ) { } 
PhysicalDeviceProvokingVertexFeaturesEXT & operator=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProvokingVertexFeaturesEXT & operator=( VkPhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setProvokingVertexLast( Bool32 provokingVertexLast_ ) & VULKAN_HPP_NOEXCEPT { provokingVertexLast = provokingVertexLast_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT && setProvokingVertexLast( Bool32 provokingVertexLast_ ) && VULKAN_HPP_NOEXCEPT { provokingVertexLast = provokingVertexLast_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT & setTransformFeedbackPreservesProvokingVertex( Bool32 transformFeedbackPreservesProvokingVertex_ ) & VULKAN_HPP_NOEXCEPT { transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProvokingVertexFeaturesEXT && setTransformFeedbackPreservesProvokingVertex( Bool32 transformFeedbackPreservesProvokingVertex_ ) && VULKAN_HPP_NOEXCEPT { transformFeedbackPreservesProvokingVertex = transformFeedbackPreservesProvokingVertex_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceProvokingVertexFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexFeaturesEXT &() 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, provokingVertexLast, transformFeedbackPreservesProvokingVertex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceProvokingVertexFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexLast == rhs.provokingVertexLast ) && ( transformFeedbackPreservesProvokingVertex == rhs.transformFeedbackPreservesProvokingVertex ); # endif } bool operator!=( PhysicalDeviceProvokingVertexFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceProvokingVertexFeaturesEXT; void * pNext = {}; Bool32 provokingVertexLast = {}; Bool32 transformFeedbackPreservesProvokingVertex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProvokingVertexFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceProvokingVertexFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceProvokingVertexPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceProvokingVertexPropertiesEXT.html struct PhysicalDeviceProvokingVertexPropertiesEXT { using NativeType = VkPhysicalDeviceProvokingVertexPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( Bool32 provokingVertexModePerPipeline_ = {}, Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ } , transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceProvokingVertexPropertiesEXT( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceProvokingVertexPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceProvokingVertexPropertiesEXT & operator=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceProvokingVertexPropertiesEXT & operator=( VkPhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceProvokingVertexPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceProvokingVertexPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, provokingVertexModePerPipeline, transformFeedbackPreservesTriangleFanProvokingVertex ); } 
#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceProvokingVertexPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( provokingVertexModePerPipeline == rhs.provokingVertexModePerPipeline ) && ( transformFeedbackPreservesTriangleFanProvokingVertex == rhs.transformFeedbackPreservesTriangleFanProvokingVertex ); # endif } bool operator!=( PhysicalDeviceProvokingVertexPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceProvokingVertexPropertiesEXT; void * pNext = {}; Bool32 provokingVertexModePerPipeline = {}; Bool32 transformFeedbackPreservesTriangleFanProvokingVertex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceProvokingVertexPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceProvokingVertexPropertiesEXT; }; // wrapper struct for struct VkPhysicalDevicePushConstantBankFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushConstantBankFeaturesNV.html struct PhysicalDevicePushConstantBankFeaturesNV { using NativeType = VkPhysicalDevicePushConstantBankFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushConstantBankFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankFeaturesNV( Bool32 pushConstantBank_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pushConstantBank{ pushConstantBank_ } { } VULKAN_HPP_CONSTEXPR 
PhysicalDevicePushConstantBankFeaturesNV( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushConstantBankFeaturesNV( VkPhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePushConstantBankFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePushConstantBankFeaturesNV & operator=( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePushConstantBankFeaturesNV & operator=( VkPhysicalDevicePushConstantBankFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV & setPushConstantBank( Bool32 pushConstantBank_ ) & VULKAN_HPP_NOEXCEPT { pushConstantBank = pushConstantBank_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushConstantBankFeaturesNV && setPushConstantBank( Bool32 pushConstantBank_ ) && VULKAN_HPP_NOEXCEPT { pushConstantBank = pushConstantBank_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDevicePushConstantBankFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if 
defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, pushConstantBank ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePushConstantBankFeaturesNV const & ) const = default; #else bool operator==( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pushConstantBank == rhs.pushConstantBank ); # endif } bool operator!=( PhysicalDevicePushConstantBankFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePushConstantBankFeaturesNV; void * pNext = {}; Bool32 pushConstantBank = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePushConstantBankFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePushConstantBankFeaturesNV; }; // wrapper struct for struct VkPhysicalDevicePushConstantBankPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushConstantBankPropertiesNV.html struct PhysicalDevicePushConstantBankPropertiesNV { using NativeType = VkPhysicalDevicePushConstantBankPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushConstantBankPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankPropertiesNV( uint32_t maxGraphicsPushConstantBanks_ = {}, uint32_t maxComputePushConstantBanks_ = {}, uint32_t maxGraphicsPushDataBanks_ = {}, uint32_t maxComputePushDataBanks_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxGraphicsPushConstantBanks{ 
maxGraphicsPushConstantBanks_ } , maxComputePushConstantBanks{ maxComputePushConstantBanks_ } , maxGraphicsPushDataBanks{ maxGraphicsPushDataBanks_ } , maxComputePushDataBanks{ maxComputePushDataBanks_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePushConstantBankPropertiesNV( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushConstantBankPropertiesNV( VkPhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePushConstantBankPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePushConstantBankPropertiesNV & operator=( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePushConstantBankPropertiesNV & operator=( VkPhysicalDevicePushConstantBankPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePushConstantBankPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePushConstantBankPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxGraphicsPushConstantBanks, maxComputePushConstantBanks, maxGraphicsPushDataBanks, maxComputePushDataBanks ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePushConstantBankPropertiesNV const & ) const = default; #else bool operator==( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( 
sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsPushConstantBanks == rhs.maxGraphicsPushConstantBanks ) && ( maxComputePushConstantBanks == rhs.maxComputePushConstantBanks ) && ( maxGraphicsPushDataBanks == rhs.maxGraphicsPushDataBanks ) && ( maxComputePushDataBanks == rhs.maxComputePushDataBanks ); # endif } bool operator!=( PhysicalDevicePushConstantBankPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePushConstantBankPropertiesNV; void * pNext = {}; uint32_t maxGraphicsPushConstantBanks = {}; uint32_t maxComputePushConstantBanks = {}; uint32_t maxGraphicsPushDataBanks = {}; uint32_t maxComputePushDataBanks = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePushConstantBankPropertiesNV; }; #endif template <> struct CppType { using Type = PhysicalDevicePushConstantBankPropertiesNV; }; // wrapper struct for struct VkPhysicalDevicePushDescriptorProperties, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDevicePushDescriptorProperties.html struct PhysicalDevicePushDescriptorProperties { using NativeType = VkPhysicalDevicePushDescriptorProperties; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorProperties; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxPushDescriptors{ maxPushDescriptors_ } { } VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT : 
PhysicalDevicePushDescriptorProperties( *reinterpret_cast( &rhs ) ) { } PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDevicePushDescriptorProperties const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDevicePushDescriptorProperties *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, maxPushDescriptors ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default; #else bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxPushDescriptors == rhs.maxPushDescriptors ); # endif } bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDevicePushDescriptorProperties; void * pNext = {}; uint32_t maxPushDescriptors = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDevicePushDescriptorProperties; }; #endif template <> struct CppType { using Type = PhysicalDevicePushDescriptorProperties; }; using 
PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; // wrapper struct for struct VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM.html struct PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM { using NativeType = VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( uint32_t queueFamilyIndex_ = {}, PhysicalDeviceDataGraphProcessingEngineTypeARM engineType_ = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , queueFamilyIndex{ queueFamilyIndex_ } , engineType{ engineType_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & operator=( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & operator=( VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) 
&& !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & setPNext( const void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM && setPNext( const void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) & VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM && setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) && VULKAN_HPP_NOEXCEPT { queueFamilyIndex = queueFamilyIndex_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM & setEngineType( PhysicalDeviceDataGraphProcessingEngineTypeARM engineType_ ) & VULKAN_HPP_NOEXCEPT { engineType = engineType_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM && setEngineType( PhysicalDeviceDataGraphProcessingEngineTypeARM engineType_ ) && VULKAN_HPP_NOEXCEPT { engineType = engineType_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, 
pNext, queueFamilyIndex, engineType ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & ) const = default; #else bool operator==( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( queueFamilyIndex == rhs.queueFamilyIndex ) && ( engineType == rhs.engineType ); # endif } bool operator!=( PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM; const void * pNext = {}; uint32_t queueFamilyIndex = {}; PhysicalDeviceDataGraphProcessingEngineTypeARM engineType = PhysicalDeviceDataGraphProcessingEngineTypeARM::eDefault; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM; }; #endif template <> struct CppType { using Type = PhysicalDeviceQueueFamilyDataGraphProcessingEngineInfoARM; }; // wrapper struct for struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT.html struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT { using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , 
// Body of PhysicalDeviceRGBA10X6FormatsFeaturesEXT (declared just above): single Bool32 feature flag
// 'formatRgba10x6WithoutYCbCrSampler' behind the standard sType/pNext header. Pattern, as visible here:
// constexpr value ctor, defaulted copy ctor/assign, converting ctor and operator= from the native
// VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT, lvalue/rvalue chainable setters (guarded by
// VULKAN_HPP_NO_SETTERS / VULKAN_HPP_NO_STRUCT_SETTERS), native-struct conversions, reflect(),
// ==/!= (memberwise or via reflect()), then the members and the CppType mapping.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRGBA10X6FormatsFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRGBA10X6FormatsFeaturesEXT & operator=( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT & setFormatRgba10x6WithoutYCbCrSampler( Bool32 formatRgba10x6WithoutYCbCrSampler_ ) & VULKAN_HPP_NOEXCEPT { formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRGBA10X6FormatsFeaturesEXT && setFormatRgba10x6WithoutYCbCrSampler( Bool32 formatRgba10x6WithoutYCbCrSampler_ ) && VULKAN_HPP_NOEXCEPT { formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator 
VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, formatRgba10x6WithoutYCbCrSampler ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( formatRgba10x6WithoutYCbCrSampler == rhs.formatRgba10x6WithoutYCbCrSampler ); # endif } bool operator!=( PhysicalDeviceRGBA10X6FormatsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT; void * pNext = {}; Bool32 formatRgba10x6WithoutYCbCrSampler = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceRGBA10X6FormatsFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.html struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { using NativeType = VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( 
// Body of PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT (declared just above): three Bool32
// feature flags (color/depth/stencil rasterization-order attachment access) behind the sType/pNext header.
// Same generated pattern as the other feature structs: constexpr value ctor (pNext last, defaulted to nullptr),
// defaulted copy operations, converting ctor/assign from the native Vk struct, chainable lvalue/rvalue setters,
// native-struct conversions, reflect(), and ==/!= falling back to memberwise comparison when reflect() is off.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( Bool32 rasterizationOrderColorAttachmentAccess_ = {}, Bool32 rasterizationOrderDepthAttachmentAccess_ = {}, Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ } , rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ } , rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & operator=( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderColorAttachmentAccess( Bool32 rasterizationOrderColorAttachmentAccess_ ) & VULKAN_HPP_NOEXCEPT { rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT && setRasterizationOrderColorAttachmentAccess( Bool32 rasterizationOrderColorAttachmentAccess_ ) && VULKAN_HPP_NOEXCEPT { rasterizationOrderColorAttachmentAccess = rasterizationOrderColorAttachmentAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderDepthAttachmentAccess( Bool32 rasterizationOrderDepthAttachmentAccess_ ) & VULKAN_HPP_NOEXCEPT { rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT && setRasterizationOrderDepthAttachmentAccess( Bool32 rasterizationOrderDepthAttachmentAccess_ ) && VULKAN_HPP_NOEXCEPT { rasterizationOrderDepthAttachmentAccess = rasterizationOrderDepthAttachmentAccess_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT & setRasterizationOrderStencilAttachmentAccess( Bool32 rasterizationOrderStencilAttachmentAccess_ ) & VULKAN_HPP_NOEXCEPT { rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT && setRasterizationOrderStencilAttachmentAccess( Bool32 rasterizationOrderStencilAttachmentAccess_ ) && VULKAN_HPP_NOEXCEPT { rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const 
VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rasterizationOrderColorAttachmentAccess, rasterizationOrderDepthAttachmentAccess, rasterizationOrderStencilAttachmentAccess ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rasterizationOrderColorAttachmentAccess == rhs.rasterizationOrderColorAttachmentAccess ) && ( rasterizationOrderDepthAttachmentAccess == rhs.rasterizationOrderDepthAttachmentAccess ) && ( rasterizationOrderStencilAttachmentAccess == rhs.rasterizationOrderStencilAttachmentAccess ); # endif } bool operator!=( PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; void * pNext = {}; Bool32 rasterizationOrderColorAttachmentAccess = {}; Bool32 rasterizationOrderDepthAttachmentAccess = {}; Bool32 rasterizationOrderStencilAttachmentAccess = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
// Closes the CppType mapping for the previous struct and declares the ARM alias
// (PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = ...EXT), then defines
// PhysicalDeviceRawAccessChainsFeaturesNV: single Bool32 'shaderRawAccessChains' feature flag with the
// standard generated pattern — constexpr value ctor, defaulted copies, native-struct converting ctor/assign,
// chainable lvalue/rvalue setters, native conversions, reflect(), ==/!=, members, CppType mapping.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; }; using PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; // wrapper struct for struct VkPhysicalDeviceRawAccessChainsFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRawAccessChainsFeaturesNV.html struct PhysicalDeviceRawAccessChainsFeaturesNV { using NativeType = VkPhysicalDeviceRawAccessChainsFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( Bool32 shaderRawAccessChains_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderRawAccessChains{ shaderRawAccessChains_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRawAccessChainsFeaturesNV( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRawAccessChainsFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRawAccessChainsFeaturesNV & operator=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRawAccessChainsFeaturesNV & operator=( VkPhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; 
} VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV & setShaderRawAccessChains( Bool32 shaderRawAccessChains_ ) & VULKAN_HPP_NOEXCEPT { shaderRawAccessChains = shaderRawAccessChains_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRawAccessChainsFeaturesNV && setShaderRawAccessChains( Bool32 shaderRawAccessChains_ ) && VULKAN_HPP_NOEXCEPT { shaderRawAccessChains = shaderRawAccessChains_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRawAccessChainsFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRawAccessChainsFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRawAccessChainsFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRawAccessChainsFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderRawAccessChains ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRawAccessChainsFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRawAccessChains == rhs.shaderRawAccessChains ); # endif } bool operator!=( PhysicalDeviceRawAccessChainsFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV; void * pNext = {}; Bool32 shaderRawAccessChains = {}; }; #if 20 
// Closes the CppType mapping for PhysicalDeviceRawAccessChainsFeaturesNV, then defines
// PhysicalDeviceRayQueryFeaturesKHR: single Bool32 'rayQuery' feature flag, same generated pattern
// (constexpr value ctor, defaulted copies, native-struct converting ctor/assign, chainable lvalue/rvalue
// setters, native conversions, reflect(), ==/!=, members, CppType mapping), and opens the wrapper for
// VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT at the end.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
<= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRawAccessChainsFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceRawAccessChainsFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceRayQueryFeaturesKHR, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayQueryFeaturesKHR.html struct PhysicalDeviceRayQueryFeaturesKHR { using NativeType = VkPhysicalDeviceRayQueryFeaturesKHR; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rayQuery{ rayQuery_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( 
Bool32 rayQuery_ ) & VULKAN_HPP_NOEXCEPT { rayQuery = rayQuery_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR && setRayQuery( Bool32 rayQuery_ ) && VULKAN_HPP_NOEXCEPT { rayQuery = rayQuery_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRayQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayQueryFeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayQueryFeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayQuery ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayQuery == rhs.rayQuery ); # endif } bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR; void * pNext = {}; Bool32 rayQuery = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRayQueryFeaturesKHR; }; #endif template <> struct CppType { using Type = PhysicalDeviceRayQueryFeaturesKHR; }; // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT.html struct 
// PhysicalDeviceRayTracingInvocationReorderFeaturesEXT: single Bool32 'rayTracingInvocationReorder' feature
// flag, standard generated pattern — constexpr value ctor, defaulted copies, native-struct converting
// ctor/assign, chainable lvalue/rvalue setters, native conversions, reflect(), ==/!=, members, CppType mapping.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
PhysicalDeviceRayTracingInvocationReorderFeaturesEXT { using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( Bool32 rayTracingInvocationReorder_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rayTracingInvocationReorder{ rayTracingInvocationReorder_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingInvocationReorderFeaturesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT & setRayTracingInvocationReorder( Bool32 
rayTracingInvocationReorder_ ) & VULKAN_HPP_NOEXCEPT { rayTracingInvocationReorder = rayTracingInvocationReorder_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesEXT && setRayTracingInvocationReorder( Bool32 rayTracingInvocationReorder_ ) && VULKAN_HPP_NOEXCEPT { rayTracingInvocationReorder = rayTracingInvocationReorder_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayTracingInvocationReorder ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); # endif } bool operator!=( PhysicalDeviceRayTracingInvocationReorderFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesEXT; void * pNext = {}; Bool32 rayTracingInvocationReorder = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = 
// Closes the CppType mapping for the EXT variant, then defines the NV twin
// PhysicalDeviceRayTracingInvocationReorderFeaturesNV: same single Bool32 'rayTracingInvocationReorder'
// flag and the same generated pattern (value ctor, defaulted copies, native converting ctor/assign,
// chainable setters, native conversions, reflect(), ==/!=).
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
PhysicalDeviceRayTracingInvocationReorderFeaturesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesEXT; }; // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.html struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV { using NativeType = VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( Bool32 rayTracingInvocationReorder_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rayTracingInvocationReorder{ rayTracingInvocationReorder_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingInvocationReorderFeaturesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRayTracingInvocationReorderFeaturesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setPNext( 
void * pNext_ ) & VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT { pNext = pNext_; return std::move( *this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV & setRayTracingInvocationReorder( Bool32 rayTracingInvocationReorder_ ) & VULKAN_HPP_NOEXCEPT { rayTracingInvocationReorder = rayTracingInvocationReorder_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingInvocationReorderFeaturesNV && setRayTracingInvocationReorder( Bool32 rayTracingInvocationReorder_ ) && VULKAN_HPP_NOEXCEPT { rayTracingInvocationReorder = rayTracingInvocationReorder_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayTracingInvocationReorder ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorder == rhs.rayTracingInvocationReorder ); # endif } bool operator!=( 
// Finishes PhysicalDeviceRayTracingInvocationReorderFeaturesNV (operator!=, members, CppType mapping), then
// defines PhysicalDeviceRayTracingInvocationReorderPropertiesEXT: a read-only properties struct
// (RayTracingInvocationReorderModeEXT hint + uint32_t maxShaderBindingTableRecordIndex). Unlike the feature
// structs, no setter section is generated here — only ctors, operator= from the native struct, conversions,
// reflect(), and comparisons are visible.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
PhysicalDeviceRayTracingInvocationReorderFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderFeaturesNV; void * pNext = {}; Bool32 rayTracingInvocationReorder = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; }; #endif template <> struct CppType { using Type = PhysicalDeviceRayTracingInvocationReorderFeaturesNV; }; // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT.html struct PhysicalDeviceRayTracingInvocationReorderPropertiesEXT { using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint_ = RayTracingInvocationReorderModeEXT::eNone, uint32_t maxShaderBindingTableRecordIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ } , maxShaderBindingTableRecordIndex{ maxShaderBindingTableRecordIndex_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : 
PhysicalDeviceRayTracingInvocationReorderPropertiesEXT( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRayTracingInvocationReorderPropertiesEXT & operator=( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRayTracingInvocationReorderPropertiesEXT & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesEXT *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint, maxShaderBindingTableRecordIndex ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ) && ( maxShaderBindingTableRecordIndex == rhs.maxShaderBindingTableRecordIndex ); # endif } bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif 
// Members and CppType mapping of PhysicalDeviceRayTracingInvocationReorderPropertiesEXT, then the NV twin
// PhysicalDeviceRayTracingInvocationReorderPropertiesNV: read-only properties struct holding only the
// RayTracingInvocationReorderModeEXT reordering hint (no maxShaderBindingTableRecordIndex member here).
// As with the EXT properties struct, no setter section is generated — ctors, operator= from the native
// struct, conversions, reflect(), and comparisons only.
// NOTE(review): angle-bracket contents are stripped in this copy — regenerate rather than hand-patching.
public: StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesEXT; void * pNext = {}; RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint = RayTracingInvocationReorderModeEXT::eNone; uint32_t maxShaderBindingTableRecordIndex = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesEXT; }; #endif template <> struct CppType { using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesEXT; }; // wrapper struct for struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV, see // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.html struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV { using NativeType = VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; static const bool allowDuplicate = false; static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; #if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint_ = RayTracingInvocationReorderModeEXT::eNone, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ } { } VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderPropertiesNV( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceRayTracingInvocationReorderPropertiesNV( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceRayTracingInvocationReorderPropertiesNV( *reinterpret_cast( &rhs ) ) { } PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( 
PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_CONSTRUCTORS*/ PhysicalDeviceRayTracingInvocationReorderPropertiesNV & operator=( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayTracingInvocationReorderReorderingHint ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingInvocationReorderReorderingHint == rhs.rayTracingInvocationReorderReorderingHint ); # endif } bool operator!=( PhysicalDeviceRayTracingInvocationReorderPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV; void * pNext = {}; RayTracingInvocationReorderModeEXT rayTracingInvocationReorderReorderingHint = RayTracingInvocationReorderModeEXT::eNone; }; #if 20 <= VULKAN_HPP_CPP_VERSION 
  // Map the structure's enum/type back to the C++ wrapper type (used by structure-chain machinery).
  // NOTE(review): template-argument lists on CppType (and on reinterpret_cast / std::tuple below)
  // appear to have been stripped from this copy of the generated header — confirm against the
  // generator output before building.
  template <>
  struct CppType
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
  };
#endif

  template <>
  struct CppType
  {
    using Type = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV.html
  struct PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV
  {
    using NativeType = VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;

    // This structure may appear at most once in a pNext chain.
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the in-class initializer; the two feature flags and pNext vary.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( Bool32 spheres_            = {},
                                                                               Bool32 linearSweptSpheres_ = {},
                                                                               void * pNext_              = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , spheres{ spheres_ }
      , linearSweptSpheres{ linearSweptSpheres_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the corresponding C struct (cast-and-delegate, as generated for every wrapper).
    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV( *reinterpret_cast( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & operator=(
      PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the corresponding C struct.
    PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &
      operator=( VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this, so both
    // named-variable and temporary builder-style usage work.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return std::move( *this );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV & setSpheres( Bool32 spheres_ ) & VULKAN_HPP_NOEXCEPT
    {
      spheres = spheres_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV && setSpheres( Bool32 spheres_ ) && VULKAN_HPP_NOEXCEPT
    {
      spheres = spheres_;
      return std::move( *this );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &
      setLinearSweptSpheres( Bool32 linearSweptSpheres_ ) & VULKAN_HPP_NOEXCEPT
    {
      linearSweptSpheres = linearSweptSpheres_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &&
      setLinearSweptSpheres( Bool32 linearSweptSpheres_ ) && VULKAN_HPP_NOEXCEPT
    {
      linearSweptSpheres = linearSweptSpheres_;
      return std::move( *this );
    }
#endif /*VULKAN_HPP_NO_SETTERS*/

    // Implicit conversions to the C struct (const/non-const reference and pointer), so the wrapper
    // can be handed straight to the C API.
    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const &() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const *() const VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast( this );
    }

    operator VkPhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV *() VULKAN_HPP_NOEXCEPT
    {
      return reinterpret_cast( this );
    }

#if defined( VULKAN_HPP_USE_REFLECT )
    // Tuple-of-references view over all members, for generic comparison/hashing.
    std::tuple reflect() const VULKAN_HPP_NOEXCEPT
    {
      return std::tie( sType, pNext, spheres, linearSweptSpheres );
    }
#endif

#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
    auto operator<=>( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & ) const = default;
#else
    // Member-wise equality; delegates to reflect() when available so the member list lives in one place.
    // NOTE(review): template-argument lists on CppType / reinterpret_cast below appear to have been
    // stripped from this copy of the generated header — confirm against the generator output.
    bool operator==( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( VULKAN_HPP_USE_REFLECT )
      return this->reflect() == rhs.reflect();
#  else
      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( spheres == rhs.spheres ) && ( linearSweptSpheres == rhs.linearSweptSpheres );
#  endif
    }

    bool operator!=( PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif

  public:
    StructureType sType              = StructureType::ePhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
    void *        pNext              = {};
    Bool32        spheres            = {};
    Bool32        linearSweptSpheres = {};
  };

  // Map the structure's enum/type back to the C++ wrapper type (used by structure-chain machinery).
#if 20 <= VULKAN_HPP_CPP_VERSION
  template <>
  struct CppType
  {
    using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
  };
#endif

  template <>
  struct CppType
  {
    using Type = PhysicalDeviceRayTracingLinearSweptSpheresFeaturesNV;
  };

  // wrapper struct for struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR, see
  // https://registry.khronos.org/vulkan/specs/latest/man/html/VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.html
  struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR
  {
    using NativeType = VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;

    // This structure may appear at most once in a pNext chain.
    static const bool                    allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR;

#if !defined( VULKAN_HPP_NO_CONSTRUCTORS ) && !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    // Value constructor: sType is fixed by the in-class initializer; the two feature flags and pNext vary.
    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( Bool32 rayTracingMaintenance1_               = {},
                                                                          Bool32 rayTracingPipelineTraceRaysIndirect2_ = {},
                                                                          void * pNext_                                = nullptr ) VULKAN_HPP_NOEXCEPT
      : pNext{ pNext_ }
      , rayTracingMaintenance1{ rayTracingMaintenance1_ }
      , rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ }
    {
    }

    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs )
      VULKAN_HPP_NOEXCEPT = default;

    // Construct from the corresponding C struct (cast-and-delegate, as generated for every wrapper).
    PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceRayTracingMaintenance1FeaturesKHR( *reinterpret_cast( &rhs ) )
    {
    }

    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
#endif /*VULKAN_HPP_NO_CONSTRUCTORS*/

    // Assign from the corresponding C struct.
    PhysicalDeviceRayTracingMaintenance1FeaturesKHR & operator=( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast( &rhs );
      return *this;
    }

#if !defined( VULKAN_HPP_NO_SETTERS ) && !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
    // Chainable setters: lvalue overloads return *this, rvalue overloads move *this, so both
    // named-variable and temporary builder-style usage work.
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR & setPNext( void * pNext_ ) & VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR && setPNext( void * pNext_ ) && VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return std::move( *this );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &
      setRayTracingMaintenance1( Bool32 rayTracingMaintenance1_ ) & VULKAN_HPP_NOEXCEPT
    {
      rayTracingMaintenance1 = rayTracingMaintenance1_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &&
      setRayTracingMaintenance1( Bool32 rayTracingMaintenance1_ ) && VULKAN_HPP_NOEXCEPT
    {
      rayTracingMaintenance1 = rayTracingMaintenance1_;
      return std::move( *this );
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &
      setRayTracingPipelineTraceRaysIndirect2( Bool32 rayTracingPipelineTraceRaysIndirect2_ ) & VULKAN_HPP_NOEXCEPT
    {
      rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_;
      return *this;
    }

    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingMaintenance1FeaturesKHR &&
      setRayTracingPipelineTraceRaysIndirect2( Bool32 rayTracingPipelineTraceRaysIndirect2_ ) && VULKAN_HPP_NOEXCEPT
    {
rayTracingPipelineTraceRaysIndirect2 = rayTracingPipelineTraceRaysIndirect2_; return std::move( *this ); } #endif /*VULKAN_HPP_NO_SETTERS*/ operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR const *() const VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } operator VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR *() VULKAN_HPP_NOEXCEPT { return reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) std::tuple reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, rayTracingMaintenance1, rayTracingPipelineTraceRaysIndirect2 ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( rayTracingMaintenance1 == rhs.rayTracingMaintenance1 ) && ( rayTracingPipelineTraceRaysIndirect2 == rhs.rayTracingPipelineTraceRaysIndirect2 ); # endif } bool operator!=( PhysicalDeviceRayTracingMaintenance1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: StructureType sType = StructureType::ePhysicalDeviceRayTracingMaintenance1FeaturesKHR; void * pNext = {}; Bool32 rayTracingMaintenance1 = {}; Bool32 rayTracingPipelineTraceRaysIndirect2 = {}; }; #if 20 <= VULKAN_HPP_CPP_VERSION template <> struct CppType { using Type = PhysicalDeviceRayTracingMaintenance1FeaturesKHR; }; #endif template <> struct CppType { using Type = Physica